From d1c275b350f9ae74be92114f4819d6afb55d6007 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Sat, 30 May 2020 18:46:52 +0300 Subject: [PATCH 01/34] linker: MSVC supports linking static libraries as a whole archive --- src/librustc_codegen_ssa/back/linker.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/librustc_codegen_ssa/back/linker.rs b/src/librustc_codegen_ssa/back/linker.rs index 46c365efdb5fa..511c851fd31ab 100644 --- a/src/librustc_codegen_ssa/back/linker.rs +++ b/src/librustc_codegen_ssa/back/linker.rs @@ -704,12 +704,14 @@ impl<'a> Linker for MsvcLinker<'a> { } fn link_whole_staticlib(&mut self, lib: Symbol, _search_path: &[PathBuf]) { - // not supported? self.link_staticlib(lib); + self.cmd.arg(format!("/WHOLEARCHIVE:{}.lib", lib)); } fn link_whole_rlib(&mut self, path: &Path) { - // not supported? self.link_rlib(path); + let mut arg = OsString::from("/WHOLEARCHIVE:"); + arg.push(path); + self.cmd.arg(arg); } fn optimize(&mut self) { // Needs more investigation of `/OPT` arguments From d40e624a3625c7c2d68c949435fd883cd43dd065 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tomasz=20Mi=C4=85sko?= Date: Fri, 5 Jun 2020 00:00:00 +0000 Subject: [PATCH 02/34] compiletest: Add directives to detect sanitizer support Add needs-sanitizer-{address,leak,memory,thread} directive indicating that test requires target with support for specific sanitizer. This is an addition to the existing needs-sanitizer-support directive indicating that test requires a sanitizer runtime library. --- .../codegen/sanitizer-memory-track-orgins.rs | 4 +-- .../codegen/sanitizer-no-sanitize-inlining.rs | 6 ++-- src/test/codegen/sanitizer-no-sanitize.rs | 2 +- src/test/codegen/sanitizer-recover.rs | 5 ++-- .../sanitizer-cdylib-link/Makefile | 2 +- .../sanitizer-dylib-link/Makefile | 2 +- .../sanitizer-staticlib-link/Makefile | 2 +- src/test/rustdoc/sanitizer-option.rs | 1 + src/test/ui/sanitize/address.rs | 2 +- src/test/ui/sanitize/badfree.rs | 2 +- src/test/ui/sanitize/cfg.rs | 6 ++-- .../sanitize/issue-72154-lifetime-markers.rs | 2 +- src/test/ui/sanitize/leak.rs | 2 +- src/test/ui/sanitize/memory.rs | 3 +- .../new-llvm-pass-manager-thin-lto.rs | 2 +- src/test/ui/sanitize/thread.rs | 2 +- src/test/ui/sanitize/use-after-scope.rs | 2 +- src/tools/compiletest/src/header.rs | 28 +++++++++++++++---- src/tools/compiletest/src/header/tests.rs | 19 +++++++++++++ src/tools/compiletest/src/util.rs | 11 ++++++++ 20 files changed, 75 insertions(+), 30 deletions(-) diff --git a/src/test/codegen/sanitizer-memory-track-orgins.rs b/src/test/codegen/sanitizer-memory-track-orgins.rs index 8ea41c5d44bb1..4bd50508d1520 100644 --- a/src/test/codegen/sanitizer-memory-track-orgins.rs +++ b/src/test/codegen/sanitizer-memory-track-orgins.rs @@ -1,9 +1,7 @@ // Verifies that MemorySanitizer track-origins level can be controlled // with -Zsanitizer-memory-track-origins option. // -// needs-sanitizer-support -// only-linux -// only-x86_64 +// needs-sanitizer-memory // revisions:MSAN-0 MSAN-1 MSAN-2 MSAN-1-LTO MSAN-2-LTO // //[MSAN-0] compile-flags: -Zsanitizer=memory diff --git a/src/test/codegen/sanitizer-no-sanitize-inlining.rs b/src/test/codegen/sanitizer-no-sanitize-inlining.rs index d96e76618d325..b00441e4fc5ab 100644 --- a/src/test/codegen/sanitizer-no-sanitize-inlining.rs +++ b/src/test/codegen/sanitizer-no-sanitize-inlining.rs @@ -1,11 +1,9 @@ // Verifies that no_sanitize attribute prevents inlining when // given sanitizer is enabled, but has no effect on inlining otherwise. 
// -// needs-sanitizer-support -// only-x86_64 -// +// needs-sanitizer-address +// needs-sanitizer-leak // revisions: ASAN LSAN -// //[ASAN] compile-flags: -Zsanitizer=address -C opt-level=3 -Z mir-opt-level=3 //[LSAN] compile-flags: -Zsanitizer=leak -C opt-level=3 -Z mir-opt-level=3 diff --git a/src/test/codegen/sanitizer-no-sanitize.rs b/src/test/codegen/sanitizer-no-sanitize.rs index dfceb28c8dd10..1b2b18822e63e 100644 --- a/src/test/codegen/sanitizer-no-sanitize.rs +++ b/src/test/codegen/sanitizer-no-sanitize.rs @@ -1,7 +1,7 @@ // Verifies that no_sanitze attribute can be used to // selectively disable sanitizer instrumentation. // -// needs-sanitizer-support +// needs-sanitizer-address // compile-flags: -Zsanitizer=address #![crate_type="lib"] diff --git a/src/test/codegen/sanitizer-recover.rs b/src/test/codegen/sanitizer-recover.rs index 05b4ab5653cc8..719f219ce4ef1 100644 --- a/src/test/codegen/sanitizer-recover.rs +++ b/src/test/codegen/sanitizer-recover.rs @@ -1,9 +1,8 @@ // Verifies that AddressSanitizer and MemorySanitizer // recovery mode can be enabled with -Zsanitizer-recover. // -// needs-sanitizer-support -// only-linux -// only-x86_64 +// needs-sanitizer-address +// needs-sanitizer-memory // revisions:ASAN ASAN-RECOVER MSAN MSAN-RECOVER MSAN-RECOVER-LTO // no-prefer-dynamic // diff --git a/src/test/run-make-fulldeps/sanitizer-cdylib-link/Makefile b/src/test/run-make-fulldeps/sanitizer-cdylib-link/Makefile index 5d46be87eac6b..b11d4c4cab7cf 100644 --- a/src/test/run-make-fulldeps/sanitizer-cdylib-link/Makefile +++ b/src/test/run-make-fulldeps/sanitizer-cdylib-link/Makefile @@ -1,5 +1,5 @@ # needs-sanitizer-support -# only-x86_64 +# needs-sanitizer-address # only-linux -include ../tools.mk diff --git a/src/test/run-make-fulldeps/sanitizer-dylib-link/Makefile b/src/test/run-make-fulldeps/sanitizer-dylib-link/Makefile index f62c3a6654ed4..c2ebd2a6d8cac 100644 --- a/src/test/run-make-fulldeps/sanitizer-dylib-link/Makefile +++ b/src/test/run-make-fulldeps/sanitizer-dylib-link/Makefile @@ -1,5 +1,5 @@ # needs-sanitizer-support -# only-x86_64 +# needs-sanitizer-address # only-linux -include ../tools.mk diff --git a/src/test/run-make-fulldeps/sanitizer-staticlib-link/Makefile b/src/test/run-make-fulldeps/sanitizer-staticlib-link/Makefile index f56475b441f1a..5ceff16471cee 100644 --- a/src/test/run-make-fulldeps/sanitizer-staticlib-link/Makefile +++ b/src/test/run-make-fulldeps/sanitizer-staticlib-link/Makefile @@ -1,5 +1,5 @@ # needs-sanitizer-support -# only-x86_64 +# needs-sanitizer-address # only-linux -include ../tools.mk diff --git a/src/test/rustdoc/sanitizer-option.rs b/src/test/rustdoc/sanitizer-option.rs index 6af9ed3e33f66..a79b37ee08210 100644 --- a/src/test/rustdoc/sanitizer-option.rs +++ b/src/test/rustdoc/sanitizer-option.rs @@ -1,4 +1,5 @@ // needs-sanitizer-support +// needs-sanitizer-address // compile-flags: --test -Z sanitizer=address // // #43031: Verify that rustdoc passes `-Z` options to rustc. 
Use an extern diff --git a/src/test/ui/sanitize/address.rs b/src/test/ui/sanitize/address.rs index f8650cd86d51e..cee73b0425ad5 100644 --- a/src/test/ui/sanitize/address.rs +++ b/src/test/ui/sanitize/address.rs @@ -1,5 +1,5 @@ // needs-sanitizer-support -// only-x86_64 +// needs-sanitizer-address // // compile-flags: -Z sanitizer=address -O -g // diff --git a/src/test/ui/sanitize/badfree.rs b/src/test/ui/sanitize/badfree.rs index 1ca082c8b4704..095a6f4697b1c 100644 --- a/src/test/ui/sanitize/badfree.rs +++ b/src/test/ui/sanitize/badfree.rs @@ -1,5 +1,5 @@ // needs-sanitizer-support -// only-x86_64 +// needs-sanitizer-address // // compile-flags: -Z sanitizer=address -O // diff --git a/src/test/ui/sanitize/cfg.rs b/src/test/ui/sanitize/cfg.rs index 9c198543a8664..79dfe58f04d0b 100644 --- a/src/test/ui/sanitize/cfg.rs +++ b/src/test/ui/sanitize/cfg.rs @@ -2,8 +2,10 @@ // the `#[cfg(sanitize = "option")]` attribute is configured. // needs-sanitizer-support -// only-linux -// only-x86_64 +// needs-sanitizer-address +// needs-sanitizer-leak +// needs-sanitizer-memory +// needs-sanitizer-thread // check-pass // revisions: address leak memory thread //[address]compile-flags: -Zsanitizer=address --cfg address diff --git a/src/test/ui/sanitize/issue-72154-lifetime-markers.rs b/src/test/ui/sanitize/issue-72154-lifetime-markers.rs index 458f99143b648..b2e182238ce28 100644 --- a/src/test/ui/sanitize/issue-72154-lifetime-markers.rs +++ b/src/test/ui/sanitize/issue-72154-lifetime-markers.rs @@ -4,7 +4,7 @@ // miscompilation which was subsequently detected by AddressSanitizer as UB. // // needs-sanitizer-support -// only-x86_64 +// needs-sanitizer-address // // compile-flags: -Copt-level=0 -Zsanitizer=address // run-pass diff --git a/src/test/ui/sanitize/leak.rs b/src/test/ui/sanitize/leak.rs index 5c2f2cb4e868b..c9f10fe4f467e 100644 --- a/src/test/ui/sanitize/leak.rs +++ b/src/test/ui/sanitize/leak.rs @@ -1,5 +1,5 @@ // needs-sanitizer-support -// only-x86_64 +// needs-sanitizer-leak // // compile-flags: -Z sanitizer=leak -O // diff --git a/src/test/ui/sanitize/memory.rs b/src/test/ui/sanitize/memory.rs index 3e1cf4509a31f..a26649a580013 100644 --- a/src/test/ui/sanitize/memory.rs +++ b/src/test/ui/sanitize/memory.rs @@ -1,6 +1,5 @@ // needs-sanitizer-support -// only-linux -// only-x86_64 +// needs-sanitizer-memory // // compile-flags: -Z sanitizer=memory -Zsanitizer-memory-track-origins -O // diff --git a/src/test/ui/sanitize/new-llvm-pass-manager-thin-lto.rs b/src/test/ui/sanitize/new-llvm-pass-manager-thin-lto.rs index d0984bbe65fd5..64d6ccf340916 100644 --- a/src/test/ui/sanitize/new-llvm-pass-manager-thin-lto.rs +++ b/src/test/ui/sanitize/new-llvm-pass-manager-thin-lto.rs @@ -4,7 +4,7 @@ // // min-llvm-version 9.0 // needs-sanitizer-support -// only-x86_64 +// needs-sanitizer-address // // no-prefer-dynamic // revisions: opt0 opt1 diff --git a/src/test/ui/sanitize/thread.rs b/src/test/ui/sanitize/thread.rs index 26590be8b1870..c70cf5accc077 100644 --- a/src/test/ui/sanitize/thread.rs +++ b/src/test/ui/sanitize/thread.rs @@ -11,7 +11,7 @@ // would occasionally fail, making test flaky. 
// // needs-sanitizer-support -// only-x86_64 +// needs-sanitizer-thread // // compile-flags: -Z sanitizer=thread -O // diff --git a/src/test/ui/sanitize/use-after-scope.rs b/src/test/ui/sanitize/use-after-scope.rs index 6a2067e157af5..30be2ae6f0906 100644 --- a/src/test/ui/sanitize/use-after-scope.rs +++ b/src/test/ui/sanitize/use-after-scope.rs @@ -1,5 +1,5 @@ // needs-sanitizer-support -// only-x86_64 +// needs-sanitizer-address // // compile-flags: -Zsanitizer=address // run-fail diff --git a/src/tools/compiletest/src/header.rs b/src/tools/compiletest/src/header.rs index 9d1940dd4d6c2..9614707433e13 100644 --- a/src/tools/compiletest/src/header.rs +++ b/src/tools/compiletest/src/header.rs @@ -43,6 +43,10 @@ impl EarlyProps { let mut props = EarlyProps::default(); let rustc_has_profiler_support = env::var_os("RUSTC_PROFILER_SUPPORT").is_some(); let rustc_has_sanitizer_support = env::var_os("RUSTC_SANITIZER_SUPPORT").is_some(); + let has_asan = util::ASAN_SUPPORTED_TARGETS.contains(&&*config.target); + let has_lsan = util::LSAN_SUPPORTED_TARGETS.contains(&&*config.target); + let has_msan = util::MSAN_SUPPORTED_TARGETS.contains(&&*config.target); + let has_tsan = util::TSAN_SUPPORTED_TARGETS.contains(&&*config.target); iter_header(testfile, None, rdr, &mut |ln| { // we should check if any only- exists and if it exists @@ -74,7 +78,25 @@ impl EarlyProps { props.ignore = true; } - if !rustc_has_sanitizer_support && config.parse_needs_sanitizer_support(ln) { + if !rustc_has_sanitizer_support + && config.parse_name_directive(ln, "needs-sanitizer-support") + { + props.ignore = true; + } + + if !has_asan && config.parse_name_directive(ln, "needs-sanitizer-address") { + props.ignore = true; + } + + if !has_lsan && config.parse_name_directive(ln, "needs-sanitizer-leak") { + props.ignore = true; + } + + if !has_msan && config.parse_name_directive(ln, "needs-sanitizer-memory") { + props.ignore = true; + } + + if !has_tsan && config.parse_name_directive(ln, "needs-sanitizer-thread") { props.ignore = true; } @@ -829,10 +851,6 @@ impl Config { self.parse_name_directive(line, "needs-profiler-support") } - fn parse_needs_sanitizer_support(&self, line: &str) -> bool { - self.parse_name_directive(line, "needs-sanitizer-support") - } - /// Parses a name-value directive which contains config-specific information, e.g., `ignore-x86` /// or `normalize-stderr-32bit`. 
fn parse_cfg_name_directive(&self, line: &str, prefix: &str) -> ParsedNameDirective { diff --git a/src/tools/compiletest/src/header/tests.rs b/src/tools/compiletest/src/header/tests.rs index 31d991e0c2f87..036409fbf070f 100644 --- a/src/tools/compiletest/src/header/tests.rs +++ b/src/tools/compiletest/src/header/tests.rs @@ -195,3 +195,22 @@ fn debugger() { config.debugger = Some(Debugger::Lldb); assert!(parse_rs(&config, "// ignore-lldb").ignore); } + +#[test] +fn sanitizers() { + let mut config = config(); + + // Target that supports all sanitizers: + config.target = "x86_64-unknown-linux-gnu".to_owned(); + assert!(!parse_rs(&config, "// needs-sanitizer-address").ignore); + assert!(!parse_rs(&config, "// needs-sanitizer-leak").ignore); + assert!(!parse_rs(&config, "// needs-sanitizer-memory").ignore); + assert!(!parse_rs(&config, "// needs-sanitizer-thread").ignore); + + // Target that doesn't support sanitizers: + config.target = "wasm32-unknown-emscripten".to_owned(); + assert!(parse_rs(&config, "// needs-sanitizer-address").ignore); + assert!(parse_rs(&config, "// needs-sanitizer-leak").ignore); + assert!(parse_rs(&config, "// needs-sanitizer-memory").ignore); + assert!(parse_rs(&config, "// needs-sanitizer-thread").ignore); +} diff --git a/src/tools/compiletest/src/util.rs b/src/tools/compiletest/src/util.rs index c61bee0f8d9ea..b9087dee6174c 100644 --- a/src/tools/compiletest/src/util.rs +++ b/src/tools/compiletest/src/util.rs @@ -81,6 +81,17 @@ const ARCH_TABLE: &'static [(&'static str, &'static str)] = &[ ("xcore", "xcore"), ]; +pub const ASAN_SUPPORTED_TARGETS: &'static [&'static str] = + &["aarch64-fuchsia", "x86_64-apple-darwin", "x86_64-fuchsia", "x86_64-unknown-linux-gnu"]; + +pub const LSAN_SUPPORTED_TARGETS: &'static [&'static str] = + &["x86_64-apple-darwin", "x86_64-unknown-linux-gnu"]; + +pub const MSAN_SUPPORTED_TARGETS: &'static [&'static str] = &["x86_64-unknown-linux-gnu"]; + +pub const TSAN_SUPPORTED_TARGETS: &'static [&'static str] = + &["x86_64-apple-darwin", "x86_64-unknown-linux-gnu"]; + pub fn matches_os(triple: &str, name: &str) -> bool { // For the wasm32 bare target we ignore anything also ignored on emscripten // and then we also recognize `wasm32-bare` as the os for the target From 0687b78d56b93d28ceeaa05e794849757d7341a4 Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Sat, 13 Jun 2020 10:29:56 -0700 Subject: [PATCH 03/34] Speed up bootstrap a little. --- src/bootstrap/flags.rs | 7 ++--- src/bootstrap/lib.rs | 2 +- src/bootstrap/metadata.rs | 65 +++++++++++++++++---------------------- src/bootstrap/test.rs | 4 +-- 4 files changed, 33 insertions(+), 45 deletions(-) diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs index cfaa43f397095..03b7028b2fa55 100644 --- a/src/bootstrap/flags.rs +++ b/src/bootstrap/flags.rs @@ -10,12 +10,10 @@ use std::process; use getopts::Options; use crate::builder::Builder; +use crate::cache::{Interned, INTERNER}; use crate::config::Config; -use crate::metadata; use crate::{Build, DocTests}; -use crate::cache::{Interned, INTERNER}; - /// Deserialized version of all flags for this compile. 
pub struct Flags { pub verbose: usize, // number of -v args; each extra -v after the first is passed to Cargo @@ -444,8 +442,7 @@ Arguments: // All subcommands except `clean` can have an optional "Available paths" section if matches.opt_present("verbose") { let config = Config::parse(&["build".to_string()]); - let mut build = Build::new(config); - metadata::build(&mut build); + let build = Build::new(config); let maybe_rules_help = Builder::get_help(&build, subcommand.as_str()); extra_help.push_str(maybe_rules_help.unwrap_or_default().as_str()); diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs index 8d8a036caef88..a125b49fc01e6 100644 --- a/src/bootstrap/lib.rs +++ b/src/bootstrap/lib.rs @@ -271,7 +271,7 @@ struct Crate { impl Crate { fn is_local(&self, build: &Build) -> bool { - self.path.starts_with(&build.config.src) && !self.path.to_string_lossy().ends_with("_shim") + self.path.starts_with(&build.config.src) } fn local_path(&self, build: &Build) -> PathBuf { diff --git a/src/bootstrap/metadata.rs b/src/bootstrap/metadata.rs index 292aa3b1e24a7..185f0ddb831e7 100644 --- a/src/bootstrap/metadata.rs +++ b/src/bootstrap/metadata.rs @@ -35,49 +35,24 @@ struct ResolveNode { } pub fn build(build: &mut Build) { - let mut resolves = Vec::new(); - build_krate(&build.std_features(), build, &mut resolves, "src/libstd"); - build_krate("", build, &mut resolves, "src/libtest"); - build_krate(&build.rustc_features(), build, &mut resolves, "src/rustc"); - - let mut id2name = HashMap::with_capacity(build.crates.len()); - for (name, krate) in build.crates.iter() { - id2name.insert(krate.id.clone(), name.clone()); - } - - for node in resolves { - let name = match id2name.get(&node.id) { - Some(name) => name, - None => continue, - }; - - let krate = build.crates.get_mut(name).unwrap(); - for dep in node.dependencies.iter() { - let dep = match id2name.get(dep) { - Some(dep) => dep, - None => continue, - }; - krate.deps.insert(*dep); - } - } -} - -fn build_krate(features: &str, build: &mut Build, resolves: &mut Vec, krate: &str) { // Run `cargo metadata` to figure out what crates we're testing. - // - // Down below we're going to call `cargo test`, but to test the right set - // of packages we're going to have to know what `-p` arguments to pass it - // to know what crates to test. Here we run `cargo metadata` to learn about - // the dependency graph and what `-p` arguments there are. 
+ let features: Vec<_> = build + .std_features() + .split_whitespace() + .map(|f| format!("test/{}", f)) + .chain(build.rustc_features().split_whitespace().map(|f| format!("rustc-main/{}", f))) + .collect(); let mut cargo = Command::new(&build.initial_cargo); cargo .arg("metadata") .arg("--format-version") .arg("1") .arg("--features") - .arg(features) + .arg(features.join(",")) + .arg("-Zpackage-features") .arg("--manifest-path") - .arg(build.src.join(krate).join("Cargo.toml")); + .arg(build.src.join("Cargo.toml")) + .env("RUSTC_BOOTSTRAP", "1"); let output = output(&mut cargo); let output: Output = serde_json::from_str(&output).unwrap(); for package in output.packages { @@ -88,5 +63,23 @@ fn build_krate(features: &str, build: &mut Build, resolves: &mut Vec = + build.crates.iter().map(|(name, krate)| (krate.id.clone(), name.clone())).collect(); + + for node in output.resolve.nodes { + let name = match id2name.get(&node.id) { + Some(name) => name, + None => continue, + }; + + let krate = build.crates.get_mut(name).unwrap(); + for dep in node.dependencies.iter() { + let dep = match id2name.get(dep) { + Some(dep) => dep, + None => continue, + }; + krate.deps.insert(*dep); + } + } } diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs index 163132f563425..8659acf1cc5a5 100644 --- a/src/bootstrap/test.rs +++ b/src/bootstrap/test.rs @@ -1654,9 +1654,7 @@ impl Step for Crate { fn should_run(mut run: ShouldRun<'_>) -> ShouldRun<'_> { let builder = run.builder; for krate in run.builder.in_tree_crates("test") { - if !(krate.name.starts_with("rustc_") && krate.name.ends_with("san")) { - run = run.path(krate.local_path(&builder).to_str().unwrap()); - } + run = run.path(krate.local_path(&builder).to_str().unwrap()); } run } From 607e85110ef9c79ce5a52286bb69d385471bc675 Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Sun, 14 Jun 2020 15:57:21 -0700 Subject: [PATCH 04/34] Switch bootstrap metadata to --no-deps. This should run much faster. There are also some drive-by cleanups here to try to simplify things. Also, the paths for in-tree crates are now displayed as relative in `x.py test -h -v`. --- src/bootstrap/builder.rs | 6 +++-- src/bootstrap/doc.rs | 20 ++------------ src/bootstrap/lib.rs | 25 +++++++++++------- src/bootstrap/metadata.rs | 55 +++++++++------------------------------ src/bootstrap/test.rs | 8 ++---- 5 files changed, 37 insertions(+), 77 deletions(-) diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs index ffdd8485181f4..345af600c2adb 100644 --- a/src/bootstrap/builder.rs +++ b/src/bootstrap/builder.rs @@ -255,7 +255,8 @@ impl<'a> ShouldRun<'a> { pub fn all_krates(mut self, name: &str) -> Self { let mut set = BTreeSet::new(); for krate in self.builder.in_tree_crates(name) { - set.insert(PathBuf::from(&krate.path)); + let path = krate.local_path(self.builder); + set.insert(path); } self.paths.insert(PathSet::Set(set)); self @@ -263,7 +264,8 @@ impl<'a> ShouldRun<'a> { pub fn krate(mut self, name: &str) -> Self { for krate in self.builder.in_tree_crates(name) { - self.paths.insert(PathSet::one(&krate.path)); + let path = krate.local_path(self.builder); + self.paths.insert(PathSet::one(path)); } self } diff --git a/src/bootstrap/doc.rs b/src/bootstrap/doc.rs index 5c01c5e852c48..6d7fb7acfcb04 100644 --- a/src/bootstrap/doc.rs +++ b/src/bootstrap/doc.rs @@ -548,8 +548,8 @@ impl Step for Rustc { // Find dependencies for top level crates. 
let mut compiler_crates = HashSet::new(); for root_crate in &["rustc_driver", "rustc_codegen_llvm", "rustc_codegen_ssa"] { - let interned_root_crate = INTERNER.intern_str(root_crate); - find_compiler_crates(builder, &interned_root_crate, &mut compiler_crates); + compiler_crates + .extend(builder.in_tree_crates(root_crate).into_iter().map(|krate| krate.name)); } for krate in &compiler_crates { @@ -564,22 +564,6 @@ impl Step for Rustc { } } -fn find_compiler_crates( - builder: &Builder<'_>, - name: &Interned, - crates: &mut HashSet>, -) { - // Add current crate. - crates.insert(*name); - - // Look for dependencies. - for dep in builder.crates.get(name).unwrap().deps.iter() { - if builder.crates.get(dep).unwrap().is_local(builder) { - find_compiler_crates(builder, dep, crates); - } - } -} - #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] pub struct Rustdoc { stage: u32, diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs index a125b49fc01e6..9d3830da39066 100644 --- a/src/bootstrap/lib.rs +++ b/src/bootstrap/lib.rs @@ -270,12 +270,7 @@ struct Crate { } impl Crate { - fn is_local(&self, build: &Build) -> bool { - self.path.starts_with(&build.config.src) - } - fn local_path(&self, build: &Build) -> PathBuf { - assert!(self.is_local(build)); self.path.strip_prefix(&build.config.src).unwrap().into() } } @@ -1079,17 +1074,29 @@ impl Build { } } + /// Returns a Vec of all the dependencies of the given root crate, + /// including transitive dependencies and the root itself. Only includes + /// "local" crates (those in the local source tree, not from a registry). fn in_tree_crates(&self, root: &str) -> Vec<&Crate> { let mut ret = Vec::new(); let mut list = vec![INTERNER.intern_str(root)]; let mut visited = HashSet::new(); while let Some(krate) = list.pop() { let krate = &self.crates[&krate]; - if krate.is_local(self) { - ret.push(krate); - } + ret.push(krate); for dep in &krate.deps { - if visited.insert(dep) && dep != "build_helper" { + // Don't include optional deps if their features are not + // enabled. Ideally this would be computed from `cargo + // metadata --features …`, but that is somewhat slow. Just + // skip `build_helper` since there aren't any operations we + // want to perform on it. In the future, we may want to + // consider just filtering all build and dev dependencies in + // metadata::build. + if visited.insert(dep) + && dep != "build_helper" + && (dep != "profiler_builtins" || self.config.profiler) + && (dep != "rustc_codegen_llvm" || self.config.llvm_enabled()) + { list.push(*dep); } } diff --git a/src/bootstrap/metadata.rs b/src/bootstrap/metadata.rs index 185f0ddb831e7..a38391c7b88f2 100644 --- a/src/bootstrap/metadata.rs +++ b/src/bootstrap/metadata.rs @@ -1,5 +1,3 @@ -use std::collections::HashMap; -use std::collections::HashSet; use std::path::PathBuf; use std::process::Command; @@ -12,7 +10,6 @@ use crate::{Build, Crate}; #[derive(Deserialize)] struct Output { packages: Vec, - resolve: Resolve, } #[derive(Deserialize)] @@ -21,38 +18,25 @@ struct Package { name: String, source: Option, manifest_path: String, + dependencies: Vec, } #[derive(Deserialize)] -struct Resolve { - nodes: Vec, -} - -#[derive(Deserialize)] -struct ResolveNode { - id: String, - dependencies: Vec, +struct Dependency { + name: String, + source: Option, } pub fn build(build: &mut Build) { // Run `cargo metadata` to figure out what crates we're testing. 
- let features: Vec<_> = build - .std_features() - .split_whitespace() - .map(|f| format!("test/{}", f)) - .chain(build.rustc_features().split_whitespace().map(|f| format!("rustc-main/{}", f))) - .collect(); let mut cargo = Command::new(&build.initial_cargo); cargo .arg("metadata") .arg("--format-version") .arg("1") - .arg("--features") - .arg(features.join(",")) - .arg("-Zpackage-features") + .arg("--no-deps") .arg("--manifest-path") - .arg(build.src.join("Cargo.toml")) - .env("RUSTC_BOOTSTRAP", "1"); + .arg(build.src.join("Cargo.toml")); let output = output(&mut cargo); let output: Output = serde_json::from_str(&output).unwrap(); for package in output.packages { @@ -60,26 +44,13 @@ pub fn build(build: &mut Build) { let name = INTERNER.intern_string(package.name); let mut path = PathBuf::from(package.manifest_path); path.pop(); - build.crates.insert(name, Crate { name, id: package.id, deps: HashSet::new(), path }); - } - } - - let id2name: HashMap<_, _> = - build.crates.iter().map(|(name, krate)| (krate.id.clone(), name.clone())).collect(); - - for node in output.resolve.nodes { - let name = match id2name.get(&node.id) { - Some(name) => name, - None => continue, - }; - - let krate = build.crates.get_mut(name).unwrap(); - for dep in node.dependencies.iter() { - let dep = match id2name.get(dep) { - Some(dep) => dep, - None => continue, - }; - krate.deps.insert(*dep); + let deps = package + .dependencies + .into_iter() + .filter(|dep| dep.source.is_none()) + .map(|dep| INTERNER.intern_string(dep.name)) + .collect(); + build.crates.insert(name, Crate { name, id: package.id, deps, path }); } } } diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs index 8659acf1cc5a5..c1d0316920be7 100644 --- a/src/bootstrap/test.rs +++ b/src/bootstrap/test.rs @@ -1651,12 +1651,8 @@ impl Step for Crate { type Output = (); const DEFAULT: bool = true; - fn should_run(mut run: ShouldRun<'_>) -> ShouldRun<'_> { - let builder = run.builder; - for krate in run.builder.in_tree_crates("test") { - run = run.path(krate.local_path(&builder).to_str().unwrap()); - } - run + fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> { + run.krate("test") } fn make_run(run: RunConfig<'_>) { From 5393a2995bcea9a927c23d88a921c55bea886771 Mon Sep 17 00:00:00 2001 From: Gary Guo Date: Sat, 16 May 2020 18:44:31 +0100 Subject: [PATCH 05/34] Move convert_place_derefs_to_mutable out from check/method/confirm.rs This can live inside FnCtxt rather than ConfirmContext, and would be useful for other operations as well. 
--- src/librustc_typeck/check/method/confirm.rs | 151 +------------------ src/librustc_typeck/check/mod.rs | 1 + src/librustc_typeck/check/reconciliation.rs | 153 ++++++++++++++++++++ 3 files changed, 157 insertions(+), 148 deletions(-) create mode 100644 src/librustc_typeck/check/reconciliation.rs diff --git a/src/librustc_typeck/check/method/confirm.rs b/src/librustc_typeck/check/method/confirm.rs index 007794ce1b7ff..867dacede6e44 100644 --- a/src/librustc_typeck/check/method/confirm.rs +++ b/src/librustc_typeck/check/method/confirm.rs @@ -1,12 +1,12 @@ use super::{probe, MethodCallee}; use crate::astconv::AstConv; -use crate::check::{callee, FnCtxt, Needs, PlaceOp}; +use crate::check::{callee, FnCtxt, Needs}; use crate::hir::def_id::DefId; use crate::hir::GenericArg; use rustc_hir as hir; use rustc_infer::infer::{self, InferOk}; -use rustc_middle::ty::adjustment::{Adjust, Adjustment, OverloadedDeref, PointerCast}; +use rustc_middle::ty::adjustment::{Adjust, Adjustment, PointerCast}; use rustc_middle::ty::adjustment::{AllowTwoPhase, AutoBorrow, AutoBorrowMutability}; use rustc_middle::ty::fold::TypeFoldable; use rustc_middle::ty::subst::{Subst, SubstsRef}; @@ -121,7 +121,7 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { let callee = MethodCallee { def_id: pick.item.def_id, substs: all_substs, sig: method_sig }; if let Some(hir::Mutability::Mut) = pick.autoref { - self.convert_place_derefs_to_mutable(); + self.convert_place_derefs_to_mutable(self.self_expr); } ConfirmResult { callee, illegal_sized_bound } @@ -416,151 +416,6 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { self.register_wf_obligation(fty.into(), self.span, traits::MiscObligation); } - /////////////////////////////////////////////////////////////////////////// - // RECONCILIATION - - /// When we select a method with a mutable autoref, we have to go convert any - /// auto-derefs, indices, etc from `Deref` and `Index` into `DerefMut` and `IndexMut` - /// respectively. - fn convert_place_derefs_to_mutable(&self) { - // Gather up expressions we want to munge. - let mut exprs = vec![self.self_expr]; - - loop { - match exprs.last().unwrap().kind { - hir::ExprKind::Field(ref expr, _) - | hir::ExprKind::Index(ref expr, _) - | hir::ExprKind::Unary(hir::UnOp::UnDeref, ref expr) => exprs.push(&expr), - _ => break, - } - } - - debug!("convert_place_derefs_to_mutable: exprs={:?}", exprs); - - // Fix up autoderefs and derefs. - for (i, &expr) in exprs.iter().rev().enumerate() { - debug!("convert_place_derefs_to_mutable: i={} expr={:?}", i, expr); - - // Fix up the autoderefs. Autorefs can only occur immediately preceding - // overloaded place ops, and will be fixed by them in order to get - // the correct region. - let mut source = self.node_ty(expr.hir_id); - // Do not mutate adjustments in place, but rather take them, - // and replace them after mutating them, to avoid having the - // tables borrowed during (`deref_mut`) method resolution. 
- let previous_adjustments = - self.tables.borrow_mut().adjustments_mut().remove(expr.hir_id); - if let Some(mut adjustments) = previous_adjustments { - let needs = Needs::MutPlace; - for adjustment in &mut adjustments { - if let Adjust::Deref(Some(ref mut deref)) = adjustment.kind { - if let Some(ok) = self.try_overloaded_deref(expr.span, source, needs) { - let method = self.register_infer_ok_obligations(ok); - if let ty::Ref(region, _, mutbl) = method.sig.output().kind { - *deref = OverloadedDeref { region, mutbl }; - } - } - } - source = adjustment.target; - } - self.tables.borrow_mut().adjustments_mut().insert(expr.hir_id, adjustments); - } - - match expr.kind { - hir::ExprKind::Index(ref base_expr, ref index_expr) => { - // We need to get the final type in case dereferences were needed for the trait - // to apply (#72002). - let index_expr_ty = self.tables.borrow().expr_ty_adjusted(index_expr); - self.convert_place_op_to_mutable( - PlaceOp::Index, - expr, - base_expr, - &[index_expr_ty], - ); - } - hir::ExprKind::Unary(hir::UnOp::UnDeref, ref base_expr) => { - self.convert_place_op_to_mutable(PlaceOp::Deref, expr, base_expr, &[]); - } - _ => {} - } - } - } - - fn convert_place_op_to_mutable( - &self, - op: PlaceOp, - expr: &hir::Expr<'_>, - base_expr: &hir::Expr<'_>, - arg_tys: &[Ty<'tcx>], - ) { - debug!("convert_place_op_to_mutable({:?}, {:?}, {:?}, {:?})", op, expr, base_expr, arg_tys); - if !self.tables.borrow().is_method_call(expr) { - debug!("convert_place_op_to_mutable - builtin, nothing to do"); - return; - } - - let base_ty = self - .tables - .borrow() - .expr_adjustments(base_expr) - .last() - .map_or_else(|| self.node_ty(expr.hir_id), |adj| adj.target); - let base_ty = self.resolve_vars_if_possible(&base_ty); - - // Need to deref because overloaded place ops take self by-reference. - let base_ty = - base_ty.builtin_deref(false).expect("place op takes something that is not a ref").ty; - - let method = self.try_overloaded_place_op(expr.span, base_ty, arg_tys, Needs::MutPlace, op); - let method = match method { - Some(ok) => self.register_infer_ok_obligations(ok), - None => return self.tcx.sess.delay_span_bug(expr.span, "re-trying op failed"), - }; - debug!("convert_place_op_to_mutable: method={:?}", method); - self.write_method_call(expr.hir_id, method); - - let (region, mutbl) = if let ty::Ref(r, _, mutbl) = method.sig.inputs()[0].kind { - (r, mutbl) - } else { - span_bug!(expr.span, "input to place op is not a ref?"); - }; - - // Convert the autoref in the base expr to mutable with the correct - // region and mutability. - let base_expr_ty = self.node_ty(base_expr.hir_id); - if let Some(adjustments) = - self.tables.borrow_mut().adjustments_mut().get_mut(base_expr.hir_id) - { - let mut source = base_expr_ty; - for adjustment in &mut adjustments[..] { - if let Adjust::Borrow(AutoBorrow::Ref(..)) = adjustment.kind { - debug!("convert_place_op_to_mutable: converting autoref {:?}", adjustment); - let mutbl = match mutbl { - hir::Mutability::Not => AutoBorrowMutability::Not, - hir::Mutability::Mut => AutoBorrowMutability::Mut { - // For initial two-phase borrow - // deployment, conservatively omit - // overloaded operators. - allow_two_phase_borrow: AllowTwoPhase::No, - }, - }; - adjustment.kind = Adjust::Borrow(AutoBorrow::Ref(region, mutbl)); - adjustment.target = - self.tcx.mk_ref(region, ty::TypeAndMut { ty: source, mutbl: mutbl.into() }); - } - source = adjustment.target; - } - - // If we have an autoref followed by unsizing at the end, fix the unsize target. 
- - if let [.., Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(..)), .. }, Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), ref mut target }] = - adjustments[..] - { - *target = method.sig.inputs()[0]; - } - } - } - /////////////////////////////////////////////////////////////////////////// // MISCELLANY diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index fabedc3800ae4..1e4085f026768 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -79,6 +79,7 @@ pub mod intrinsic; pub mod method; mod op; mod pat; +mod reconciliation; mod regionck; mod upvar; mod wfcheck; diff --git a/src/librustc_typeck/check/reconciliation.rs b/src/librustc_typeck/check/reconciliation.rs new file mode 100644 index 0000000000000..b05155ae2aedd --- /dev/null +++ b/src/librustc_typeck/check/reconciliation.rs @@ -0,0 +1,153 @@ +use crate::check::{FnCtxt, Needs, PlaceOp}; +use rustc_hir as hir; +use rustc_middle::ty::adjustment::{Adjust, Adjustment, OverloadedDeref, PointerCast}; +use rustc_middle::ty::adjustment::{AllowTwoPhase, AutoBorrow, AutoBorrowMutability}; +use rustc_middle::ty::{self, Ty}; + +impl<'a, 'tcx> FnCtxt<'a, 'tcx> { + /// Convert auto-derefs, indices, etc of an expression from `Deref` and `Index` + /// into `DerefMut` and `IndexMut` respectively. + /// + /// This is a second pass of typechecking derefs/indices. We need this we do not + /// always know whether a place needs to be mutable or not in the first pass. + /// This happens whether there is an implicit mutable reborrow, e.g. when the type + /// is used as the receiver of a method call. + pub fn convert_place_derefs_to_mutable(&self, expr: &hir::Expr<'_>) { + // Gather up expressions we want to munge. + let mut exprs = vec![expr]; + + loop { + match exprs.last().unwrap().kind { + hir::ExprKind::Field(ref expr, _) + | hir::ExprKind::Index(ref expr, _) + | hir::ExprKind::Unary(hir::UnOp::UnDeref, ref expr) => exprs.push(&expr), + _ => break, + } + } + + debug!("convert_place_derefs_to_mutable: exprs={:?}", exprs); + + // Fix up autoderefs and derefs. + for (i, &expr) in exprs.iter().rev().enumerate() { + debug!("convert_place_derefs_to_mutable: i={} expr={:?}", i, expr); + + // Fix up the autoderefs. Autorefs can only occur immediately preceding + // overloaded place ops, and will be fixed by them in order to get + // the correct region. + let mut source = self.node_ty(expr.hir_id); + // Do not mutate adjustments in place, but rather take them, + // and replace them after mutating them, to avoid having the + // tables borrowed during (`deref_mut`) method resolution. + let previous_adjustments = + self.tables.borrow_mut().adjustments_mut().remove(expr.hir_id); + if let Some(mut adjustments) = previous_adjustments { + let needs = Needs::MutPlace; + for adjustment in &mut adjustments { + if let Adjust::Deref(Some(ref mut deref)) = adjustment.kind { + if let Some(ok) = self.try_overloaded_deref(expr.span, source, needs) { + let method = self.register_infer_ok_obligations(ok); + if let ty::Ref(region, _, mutbl) = method.sig.output().kind { + *deref = OverloadedDeref { region, mutbl }; + } + } + } + source = adjustment.target; + } + self.tables.borrow_mut().adjustments_mut().insert(expr.hir_id, adjustments); + } + + match expr.kind { + hir::ExprKind::Index(ref base_expr, ref index_expr) => { + // We need to get the final type in case dereferences were needed for the trait + // to apply (#72002). 
+ let index_expr_ty = self.tables.borrow().expr_ty_adjusted(index_expr); + self.convert_place_op_to_mutable( + PlaceOp::Index, + expr, + base_expr, + &[index_expr_ty], + ); + } + hir::ExprKind::Unary(hir::UnOp::UnDeref, ref base_expr) => { + self.convert_place_op_to_mutable(PlaceOp::Deref, expr, base_expr, &[]); + } + _ => {} + } + } + } + + fn convert_place_op_to_mutable( + &self, + op: PlaceOp, + expr: &hir::Expr<'_>, + base_expr: &hir::Expr<'_>, + arg_tys: &[Ty<'tcx>], + ) { + debug!("convert_place_op_to_mutable({:?}, {:?}, {:?}, {:?})", op, expr, base_expr, arg_tys); + if !self.tables.borrow().is_method_call(expr) { + debug!("convert_place_op_to_mutable - builtin, nothing to do"); + return; + } + + let base_ty = self + .tables + .borrow() + .expr_adjustments(base_expr) + .last() + .map_or_else(|| self.node_ty(expr.hir_id), |adj| adj.target); + let base_ty = self.resolve_vars_if_possible(&base_ty); + + // Need to deref because overloaded place ops take self by-reference. + let base_ty = + base_ty.builtin_deref(false).expect("place op takes something that is not a ref").ty; + + let method = self.try_overloaded_place_op(expr.span, base_ty, arg_tys, Needs::MutPlace, op); + let method = match method { + Some(ok) => self.register_infer_ok_obligations(ok), + None => return self.tcx.sess.delay_span_bug(expr.span, "re-trying op failed"), + }; + debug!("convert_place_op_to_mutable: method={:?}", method); + self.write_method_call(expr.hir_id, method); + + let (region, mutbl) = if let ty::Ref(r, _, mutbl) = method.sig.inputs()[0].kind { + (r, mutbl) + } else { + span_bug!(expr.span, "input to place op is not a ref?"); + }; + + // Convert the autoref in the base expr to mutable with the correct + // region and mutability. + let base_expr_ty = self.node_ty(base_expr.hir_id); + if let Some(adjustments) = + self.tables.borrow_mut().adjustments_mut().get_mut(base_expr.hir_id) + { + let mut source = base_expr_ty; + for adjustment in &mut adjustments[..] { + if let Adjust::Borrow(AutoBorrow::Ref(..)) = adjustment.kind { + debug!("convert_place_op_to_mutable: converting autoref {:?}", adjustment); + let mutbl = match mutbl { + hir::Mutability::Not => AutoBorrowMutability::Not, + hir::Mutability::Mut => AutoBorrowMutability::Mut { + // For initial two-phase borrow + // deployment, conservatively omit + // overloaded operators. + allow_two_phase_borrow: AllowTwoPhase::No, + }, + }; + adjustment.kind = Adjust::Borrow(AutoBorrow::Ref(region, mutbl)); + adjustment.target = + self.tcx.mk_ref(region, ty::TypeAndMut { ty: source, mutbl: mutbl.into() }); + } + source = adjustment.target; + } + + // If we have an autoref followed by unsizing at the end, fix the unsize target. + + if let [.., Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(..)), .. }, Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), ref mut target }] = + adjustments[..] 
+ { + *target = method.sig.inputs()[0]; + } + } + } +} From fb0793c610d99857820721f58456920e9e0bc240 Mon Sep 17 00:00:00 2001 From: Gary Guo Date: Mon, 15 Jun 2020 00:57:21 +0100 Subject: [PATCH 06/34] Add some comments to librustc_typeck/check/callee.rs --- src/librustc_typeck/check/callee.rs | 37 +++++++++++++++++------------ 1 file changed, 22 insertions(+), 15 deletions(-) diff --git a/src/librustc_typeck/check/callee.rs b/src/librustc_typeck/check/callee.rs index aa316105f7f11..f86b7f07b7fc4 100644 --- a/src/librustc_typeck/check/callee.rs +++ b/src/librustc_typeck/check/callee.rs @@ -220,21 +220,28 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let method = self.register_infer_ok_obligations(ok); let mut autoref = None; if borrow { - if let ty::Ref(region, _, mutbl) = method.sig.inputs()[0].kind { - let mutbl = match mutbl { - hir::Mutability::Not => AutoBorrowMutability::Not, - hir::Mutability::Mut => AutoBorrowMutability::Mut { - // For initial two-phase borrow - // deployment, conservatively omit - // overloaded function call ops. - allow_two_phase_borrow: AllowTwoPhase::No, - }, - }; - autoref = Some(Adjustment { - kind: Adjust::Borrow(AutoBorrow::Ref(region, mutbl)), - target: method.sig.inputs()[0], - }); - } + // Check for &self vs &mut self in the method signature. Since this is either + // the Fn or FnMut trait, it should be one of those. + let (region, mutbl) = if let ty::Ref(r, _, mutbl) = method.sig.inputs()[0].kind + { + (r, mutbl) + } else { + span_bug!(call_expr.span, "input to call/call_mut is not a ref?"); + }; + + let mutbl = match mutbl { + hir::Mutability::Not => AutoBorrowMutability::Not, + hir::Mutability::Mut => AutoBorrowMutability::Mut { + // For initial two-phase borrow + // deployment, conservatively omit + // overloaded function call ops. + allow_two_phase_borrow: AllowTwoPhase::No, + }, + }; + autoref = Some(Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(region, mutbl)), + target: method.sig.inputs()[0], + }); } return Some((autoref, method)); } From c2b920fab328201a2b5507b9a484c8c09752af93 Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Sun, 14 Jun 2020 16:58:45 -0700 Subject: [PATCH 07/34] Show suite paths (`src/test/ui/...`) in help output. --- src/bootstrap/builder.rs | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs index 345af600c2adb..545ad64ba2cf6 100644 --- a/src/bootstrap/builder.rs +++ b/src/bootstrap/builder.rs @@ -489,13 +489,19 @@ impl<'a> Builder<'a> { should_run = (desc.should_run)(should_run); } let mut help = String::from("Available paths:\n"); + let mut add_path = |path: &Path| { + help.push_str(&format!(" ./x.py {} {}\n", subcommand, path.display())); + }; for pathset in should_run.paths { - if let PathSet::Set(set) = pathset { - set.iter().for_each(|path| { - help.push_str( - format!(" ./x.py {} {}\n", subcommand, path.display()).as_str(), - ) - }) + match pathset { + PathSet::Set(set) => { + for path in set { + add_path(&path); + } + } + PathSet::Suite(path) => { + add_path(&path.join("...")); + } } } Some(help) From f17fd7b0e692c59075db58ac2e7ca3ac2d5e19bd Mon Sep 17 00:00:00 2001 From: Eric Huss Date: Sun, 14 Jun 2020 17:00:34 -0700 Subject: [PATCH 08/34] Add some doc comments regarding PathSet. 
--- src/bootstrap/builder.rs | 28 +++++++++++++++++++++++++--- 1 file changed, 25 insertions(+), 3 deletions(-) diff --git a/src/bootstrap/builder.rs b/src/bootstrap/builder.rs index 545ad64ba2cf6..c2f748f161f18 100644 --- a/src/bootstrap/builder.rs +++ b/src/bootstrap/builder.rs @@ -97,9 +97,21 @@ struct StepDescription { name: &'static str, } +/// Collection of paths used to match a task rule. #[derive(Debug, Clone, PartialOrd, Ord, PartialEq, Eq)] pub enum PathSet { + /// A collection of individual paths. + /// + /// These are generally matched as a path suffix. For example, a + /// command-line value of `libstd` will match if `src/libstd` is in the + /// set. Set(BTreeSet), + /// A "suite" of paths. + /// + /// These can match as a path suffix (like `Set`), or as a prefix. For + /// example, a command-line value of `src/test/ui/abi/variadic-ffi.rs` + /// will match `src/test/ui`. A command-line value of `ui` would also + /// match `src/test/ui`. Suite(PathBuf), } @@ -249,9 +261,15 @@ impl<'a> ShouldRun<'a> { self } - // Unlike `krate` this will create just one pathset. As such, it probably shouldn't actually - // ever be used, but as we transition to having all rules properly handle passing krate(...) by - // actually doing something different for every crate passed. + /// Indicates it should run if the command-line selects the given crate or + /// any of its (local) dependencies. + /// + /// Compared to `krate`, this treats the dependencies as aliases for the + /// same job. Generally it is preferred to use `krate`, and treat each + /// individual path separately. For example `./x.py test src/liballoc` + /// (which uses `krate`) will test just `liballoc`. However, `./x.py check + /// src/liballoc` (which uses `all_krates`) will check all of `libtest`. + /// `all_krates` should probably be removed at some point. pub fn all_krates(mut self, name: &str) -> Self { let mut set = BTreeSet::new(); for krate in self.builder.in_tree_crates(name) { @@ -262,6 +280,10 @@ impl<'a> ShouldRun<'a> { self } + /// Indicates it should run if the command-line selects the given crate or + /// any of its (local) dependencies. + /// + /// `make_run` will be called separately for each matching command-line path. pub fn krate(mut self, name: &str) -> Self { for krate in self.builder.in_tree_crates(name) { let path = krate.local_path(self.builder); From 8121d2e0576e74b23f0019e857b1088197ef8c04 Mon Sep 17 00:00:00 2001 From: Gary Guo Date: Mon, 15 Jun 2020 00:58:37 +0100 Subject: [PATCH 09/34] Fix up autoderef when performing mutable auto borrow --- src/librustc_typeck/check/method/confirm.rs | 5 ----- src/librustc_typeck/check/mod.rs | 15 +++++++++++++++ 2 files changed, 15 insertions(+), 5 deletions(-) diff --git a/src/librustc_typeck/check/method/confirm.rs b/src/librustc_typeck/check/method/confirm.rs index 867dacede6e44..c0f1f356ef372 100644 --- a/src/librustc_typeck/check/method/confirm.rs +++ b/src/librustc_typeck/check/method/confirm.rs @@ -119,11 +119,6 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { // Create the final `MethodCallee`. 
let callee = MethodCallee { def_id: pick.item.def_id, substs: all_substs, sig: method_sig }; - - if let Some(hir::Mutability::Mut) = pick.autoref { - self.convert_place_derefs_to_mutable(self.self_expr); - } - ConfirmResult { callee, illegal_sized_bound } } diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 1e4085f026768..82523f843aef9 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -3183,6 +3183,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { return; } + let autoborrow_mut = adj.iter().any(|adj| { + matches!(adj, &Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(_, AutoBorrowMutability::Mut { .. })), + .. + }) + }); + match self.tables.borrow_mut().adjustments_mut().entry(expr.hir_id) { Entry::Vacant(entry) => { entry.insert(adj); @@ -3212,6 +3219,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { *entry.get_mut() = adj; } } + + // When there is an auto mutable borrow, it is equivalent to `&mut expr`, + // thus `expr` is ought to be typechecked with needs = [`Needs::MutPlace`]. + // However in many cases it might not be checked this way originally, e.g. + // the receiver of a method call. We need to fix them up. + if autoborrow_mut { + self.convert_place_derefs_to_mutable(expr); + } } /// Basically whenever we are converting from a type scheme into From 4710f85882c08594a900b09c13fbe51ca207daec Mon Sep 17 00:00:00 2001 From: Gary Guo Date: Mon, 15 Jun 2020 00:59:03 +0100 Subject: [PATCH 10/34] Add ui tests for issue 68590 and 72225 --- .../issue-68590-reborrow-through-derefmut.rs | 25 +++++++++++++++++++ ...issue-72225-call-fnmut-through-derefmut.rs | 21 ++++++++++++++++ 2 files changed, 46 insertions(+) create mode 100644 src/test/ui/typeck/issue-68590-reborrow-through-derefmut.rs create mode 100644 src/test/ui/typeck/issue-72225-call-fnmut-through-derefmut.rs diff --git a/src/test/ui/typeck/issue-68590-reborrow-through-derefmut.rs b/src/test/ui/typeck/issue-68590-reborrow-through-derefmut.rs new file mode 100644 index 0000000000000..e4436260e70a0 --- /dev/null +++ b/src/test/ui/typeck/issue-68590-reborrow-through-derefmut.rs @@ -0,0 +1,25 @@ +// check-pass + +// rust-lang/rust#68590: confusing diagnostics when reborrowing through DerefMut. + +use std::cell::RefCell; + +struct A; + +struct S<'a> { + a: &'a mut A, +} + +fn take_a(_: &mut A) {} + +fn test<'a>(s: &RefCell>) { + let mut guard = s.borrow_mut(); + take_a(guard.a); + let _s2 = S { a: guard.a }; +} + +fn main() { + let a = &mut A; + let s = RefCell::new(S { a }); + test(&s); +} diff --git a/src/test/ui/typeck/issue-72225-call-fnmut-through-derefmut.rs b/src/test/ui/typeck/issue-72225-call-fnmut-through-derefmut.rs new file mode 100644 index 0000000000000..3ea05389f04a0 --- /dev/null +++ b/src/test/ui/typeck/issue-72225-call-fnmut-through-derefmut.rs @@ -0,0 +1,21 @@ +// check-pass + +// rust-lang/rust#72225: confusing diagnostics when calling FnMut through DerefMut. + +use std::cell::RefCell; + +struct S { + f: Box +} + +fn test(s: &RefCell) { + let mut guard = s.borrow_mut(); + (guard.f)(); +} + +fn main() { + let s = RefCell::new(S { + f: Box::new(|| ()) + }); + test(&s); +} From d6156e8fe5619143c687983d3ffa5b7ccc37c77e Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Mon, 8 Jun 2020 09:02:57 -0700 Subject: [PATCH 11/34] Change how compiler-builtins gets many CGUs This commit intends to fix an accidental regression from #70846. 
The goal of #70846 was to build compiler-builtins with a maximal number of CGUs to ensure that each module in the source corresponds to an object file. This high degree of control for compiler-builtins is desirable to ensure that there's at most one exported symbol per CGU, ideally enabling compiler-builtins to not conflict with the system libgcc as often. In #70846, however, only part of the compiler understands that compiler-builtins is built with many CGUs. The rest of the compiler thinks it's building with `sess.codegen_units()`. Notably the calculation of `sess.lto()` consults `sess.codegen_units()`, which when there's only one CGU it disables ThinLTO. This means that compiler-builtins is built without ThinLTO, which is quite harmful to performance! This is the root of the cause from #73135 where intrinsics were found to not be inlining trivial functions. The fix applied in this commit is to remove the special-casing of compiler-builtins in the compiler. Instead the build system is now responsible for special-casing compiler-builtins. It doesn't know exactly how many CGUs will be needed but it passes a large number that is assumed to be much greater than the number of source-level modules needed. After reading the various locations in the compiler source, this seemed like the best solution rather than adding more and more special casing in the compiler for compiler-builtins. Closes #73135 --- Cargo.toml | 13 ++++++ src/librustc_mir/monomorphize/partitioning.rs | 9 +---- .../partitioning/compiler-builtins.rs | 40 ------------------- 3 files changed, 14 insertions(+), 48 deletions(-) delete mode 100644 src/test/codegen-units/partitioning/compiler-builtins.rs diff --git a/Cargo.toml b/Cargo.toml index f2177a99a9b88..f10d539d8296b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -42,6 +42,19 @@ debug-assertions = false debug = false debug-assertions = false +[profile.release.package.compiler_builtins] +# For compiler-builtins we always use a high number of codegen units. +# The goal here is to place every single intrinsic into its own object +# file to avoid symbol clashes with the system libgcc if possible. Note +# that this number doesn't actually produce this many object files, we +# just don't create more than this number of object files. +# +# It's a bit of a bummer that we have to pass this here, unfortunately. +# Ideally this would be specified through an env var to Cargo so Cargo +# knows how many CGUs are for this specific crate, but for now +# per-crate configuration isn't specifiable in the environment. +codegen-units = 10000 + # We want the RLS to use the version of Cargo that we've got vendored in this # repository to ensure that the same exact version of Cargo is used by both the # RLS and the Cargo binary itself. 
The RLS depends on Cargo as a git repository diff --git a/src/librustc_mir/monomorphize/partitioning.rs b/src/librustc_mir/monomorphize/partitioning.rs index db1ea72c0a531..a945c1d626a9a 100644 --- a/src/librustc_mir/monomorphize/partitioning.rs +++ b/src/librustc_mir/monomorphize/partitioning.rs @@ -454,18 +454,11 @@ fn default_visibility(tcx: TyCtxt<'_>, id: DefId, is_generic: bool) -> Visibilit fn merge_codegen_units<'tcx>( tcx: TyCtxt<'tcx>, initial_partitioning: &mut PreInliningPartitioning<'tcx>, - mut target_cgu_count: usize, + target_cgu_count: usize, ) { assert!(target_cgu_count >= 1); let codegen_units = &mut initial_partitioning.codegen_units; - if tcx.is_compiler_builtins(LOCAL_CRATE) { - // Compiler builtins require some degree of control over how mono items - // are partitioned into compilation units. Provide it by keeping the - // original partitioning when compiling the compiler builtins crate. - target_cgu_count = codegen_units.len(); - } - // Note that at this point in time the `codegen_units` here may not be in a // deterministic order (but we know they're deterministically the same set). // We want this merging to produce a deterministic ordering of codegen units diff --git a/src/test/codegen-units/partitioning/compiler-builtins.rs b/src/test/codegen-units/partitioning/compiler-builtins.rs deleted file mode 100644 index 25195743b0400..0000000000000 --- a/src/test/codegen-units/partitioning/compiler-builtins.rs +++ /dev/null @@ -1,40 +0,0 @@ -// Verifies that during compiler_builtins compilation the codegen units are kept -// unmerged. Even when only a single codegen unit is requested with -Ccodegen-units=1. -// -// compile-flags: -Zprint-mono-items=eager -Ccodegen-units=1 - -#![compiler_builtins] -#![crate_type="lib"] -#![feature(compiler_builtins)] - -mod atomics { - //~ MONO_ITEM fn compiler_builtins::atomics[0]::sync_1[0] @@ compiler_builtins-cgu.0[External] - #[no_mangle] - pub extern "C" fn sync_1() {} - - //~ MONO_ITEM fn compiler_builtins::atomics[0]::sync_2[0] @@ compiler_builtins-cgu.0[External] - #[no_mangle] - pub extern "C" fn sync_2() {} - - //~ MONO_ITEM fn compiler_builtins::atomics[0]::sync_3[0] @@ compiler_builtins-cgu.0[External] - #[no_mangle] - pub extern "C" fn sync_3() {} -} - -mod x { - //~ MONO_ITEM fn compiler_builtins::x[0]::x[0] @@ compiler_builtins-cgu.1[External] - #[no_mangle] - pub extern "C" fn x() {} -} - -mod y { - //~ MONO_ITEM fn compiler_builtins::y[0]::y[0] @@ compiler_builtins-cgu.2[External] - #[no_mangle] - pub extern "C" fn y() {} -} - -mod z { - //~ MONO_ITEM fn compiler_builtins::z[0]::z[0] @@ compiler_builtins-cgu.3[External] - #[no_mangle] - pub extern "C" fn z() {} -} From e390acdfccdc5297e8fbb186bbb890cb6a3d0e57 Mon Sep 17 00:00:00 2001 From: Gary Guo Date: Mon, 15 Jun 2020 15:59:51 +0100 Subject: [PATCH 12/34] Use expr_ty_adjusted in convert_place_op_to_mutable --- src/librustc_typeck/check/reconciliation.rs | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/src/librustc_typeck/check/reconciliation.rs b/src/librustc_typeck/check/reconciliation.rs index b05155ae2aedd..0a4293140a82a 100644 --- a/src/librustc_typeck/check/reconciliation.rs +++ b/src/librustc_typeck/check/reconciliation.rs @@ -89,17 +89,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { return; } + // Need to deref because overloaded place ops take self by-reference. 
let base_ty = self .tables .borrow() - .expr_adjustments(base_expr) - .last() - .map_or_else(|| self.node_ty(expr.hir_id), |adj| adj.target); - let base_ty = self.resolve_vars_if_possible(&base_ty); - - // Need to deref because overloaded place ops take self by-reference. - let base_ty = - base_ty.builtin_deref(false).expect("place op takes something that is not a ref").ty; + .expr_ty_adjusted(base_expr) + .builtin_deref(false) + .expect("place op takes something that is not a ref") + .ty; let method = self.try_overloaded_place_op(expr.span, base_ty, arg_tys, Needs::MutPlace, op); let method = match method { From 8e7606f204d8775b051cdd9a427ec6dd89b837b8 Mon Sep 17 00:00:00 2001 From: "NODA, Kai" Date: Mon, 15 Jun 2020 02:11:35 +0800 Subject: [PATCH 13/34] bootstrap/install.rs: support a nonexistent `prefix` in `x.py install` PR #49778 introduced fs::canonicalize() which fails for a nonexistent path. This is a surprise for someone used to GNU Autotools' configure which can create any necessary intermediate directories in prefix. This change makes it run fs::create_dir_all() before canonicalize(). --- src/bootstrap/install.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/bootstrap/install.rs b/src/bootstrap/install.rs index fafd3cdf927c0..fbdef9d8272f7 100644 --- a/src/bootstrap/install.rs +++ b/src/bootstrap/install.rs @@ -70,7 +70,10 @@ fn install_sh( let libdir_default = PathBuf::from("lib"); let mandir_default = datadir_default.join("man"); let prefix = builder.config.prefix.as_ref().map_or(prefix_default, |p| { - fs::canonicalize(p).unwrap_or_else(|_| panic!("could not canonicalize {}", p.display())) + fs::create_dir_all(p) + .unwrap_or_else(|err| panic!("could not create {}: {}", p.display(), err)); + fs::canonicalize(p) + .unwrap_or_else(|err| panic!("could not canonicalize {}: {}", p.display(), err)) }); let sysconfdir = builder.config.sysconfdir.as_ref().unwrap_or(&sysconfdir_default); let datadir = builder.config.datadir.as_ref().unwrap_or(&datadir_default); From 5cedf5dfba1c83f2fe3e2fcb7acbc20c6e34604a Mon Sep 17 00:00:00 2001 From: Gary Guo Date: Mon, 15 Jun 2020 21:59:09 +0100 Subject: [PATCH 14/34] Refactor usage of Needs in typeck --- src/librustc_typeck/check/autoderef.rs | 12 +- src/librustc_typeck/check/callee.rs | 8 +- src/librustc_typeck/check/coercion.rs | 5 +- src/librustc_typeck/check/expr.rs | 87 ++--- src/librustc_typeck/check/method/confirm.rs | 4 +- src/librustc_typeck/check/mod.rs | 153 +-------- src/librustc_typeck/check/op.rs | 6 +- src/librustc_typeck/check/place_op.rs | 334 ++++++++++++++++++++ src/librustc_typeck/check/reconciliation.rs | 150 --------- 9 files changed, 382 insertions(+), 377 deletions(-) create mode 100644 src/librustc_typeck/check/place_op.rs delete mode 100644 src/librustc_typeck/check/reconciliation.rs diff --git a/src/librustc_typeck/check/autoderef.rs b/src/librustc_typeck/check/autoderef.rs index 73d4e2b78206d..2570025959cb4 100644 --- a/src/librustc_typeck/check/autoderef.rs +++ b/src/librustc_typeck/check/autoderef.rs @@ -1,5 +1,5 @@ use super::method::MethodCallee; -use super::{FnCtxt, Needs, PlaceOp}; +use super::{FnCtxt, PlaceOp}; use rustc_errors::struct_span_err; use rustc_hir as hir; @@ -170,14 +170,13 @@ impl<'a, 'tcx> Autoderef<'a, 'tcx> { } /// Returns the adjustment steps. 
- pub fn adjust_steps(&self, fcx: &FnCtxt<'a, 'tcx>, needs: Needs) -> Vec> { - fcx.register_infer_ok_obligations(self.adjust_steps_as_infer_ok(fcx, needs)) + pub fn adjust_steps(&self, fcx: &FnCtxt<'a, 'tcx>) -> Vec> { + fcx.register_infer_ok_obligations(self.adjust_steps_as_infer_ok(fcx)) } pub fn adjust_steps_as_infer_ok( &self, fcx: &FnCtxt<'a, 'tcx>, - needs: Needs, ) -> InferOk<'tcx, Vec>> { let mut obligations = vec![]; let targets = self.steps.iter().skip(1).map(|&(ty, _)| ty).chain(iter::once(self.cur_ty)); @@ -186,7 +185,7 @@ impl<'a, 'tcx> Autoderef<'a, 'tcx> { .iter() .map(|&(source, kind)| { if let AutoderefKind::Overloaded = kind { - fcx.try_overloaded_deref(self.span, source, needs).and_then( + fcx.try_overloaded_deref(self.span, source).and_then( |InferOk { value: method, obligations: o }| { obligations.extend(o); if let ty::Ref(region, _, mutbl) = method.sig.output().kind { @@ -266,8 +265,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { &self, span: Span, base_ty: Ty<'tcx>, - needs: Needs, ) -> Option>> { - self.try_overloaded_place_op(span, base_ty, &[], needs, PlaceOp::Deref) + self.try_overloaded_place_op(span, base_ty, &[], PlaceOp::Deref) } } diff --git a/src/librustc_typeck/check/callee.rs b/src/librustc_typeck/check/callee.rs index f86b7f07b7fc4..916fe9afc876a 100644 --- a/src/librustc_typeck/check/callee.rs +++ b/src/librustc_typeck/check/callee.rs @@ -1,6 +1,6 @@ use super::autoderef::Autoderef; use super::method::MethodCallee; -use super::{Expectation, FnCtxt, Needs, TupleArgumentsFlag}; +use super::{Expectation, FnCtxt, TupleArgumentsFlag}; use crate::type_error_struct; use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder}; @@ -115,7 +115,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // If the callee is a bare function or a closure, then we're all set. match adjusted_ty.kind { ty::FnDef(..) | ty::FnPtr(_) => { - let adjustments = autoderef.adjust_steps(self, Needs::None); + let adjustments = autoderef.adjust_steps(self); self.apply_adjustments(callee_expr, adjustments); return Some(CallStep::Builtin(adjusted_ty)); } @@ -135,7 +135,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { &closure_sig, ) .0; - let adjustments = autoderef.adjust_steps(self, Needs::None); + let adjustments = autoderef.adjust_steps(self); self.record_deferred_call_resolution( def_id, DeferredCallResolution { @@ -176,7 +176,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.try_overloaded_call_traits(call_expr, adjusted_ty, Some(arg_exprs)) .or_else(|| self.try_overloaded_call_traits(call_expr, adjusted_ty, None)) .map(|(autoref, method)| { - let mut adjustments = autoderef.adjust_steps(self, Needs::None); + let mut adjustments = autoderef.adjust_steps(self); adjustments.extend(autoref); self.apply_adjustments(callee_expr, adjustments); CallStep::Overloaded(method) diff --git a/src/librustc_typeck/check/coercion.rs b/src/librustc_typeck/check/coercion.rs index 96c0d98ab0618..085bb384e124b 100644 --- a/src/librustc_typeck/check/coercion.rs +++ b/src/librustc_typeck/check/coercion.rs @@ -51,7 +51,7 @@ //! we may want to adjust precisely when coercions occur. 
use crate::astconv::AstConv; -use crate::check::{FnCtxt, Needs}; +use crate::check::FnCtxt; use rustc_errors::{struct_span_err, DiagnosticBuilder}; use rustc_hir as hir; use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; @@ -421,9 +421,8 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { return success(vec![], ty, obligations); } - let needs = Needs::maybe_mut_place(mutbl_b); let InferOk { value: mut adjustments, obligations: o } = - autoderef.adjust_steps_as_infer_ok(self, needs); + autoderef.adjust_steps_as_infer_ok(self); obligations.extend(o); obligations.extend(autoderef.into_obligations()); diff --git a/src/librustc_typeck/check/expr.rs b/src/librustc_typeck/check/expr.rs index bc3ef73d851eb..c15d747be5299 100644 --- a/src/librustc_typeck/check/expr.rs +++ b/src/librustc_typeck/check/expr.rs @@ -29,9 +29,7 @@ use rustc_hir::{ExprKind, QPath}; use rustc_infer::infer; use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; use rustc_middle::ty; -use rustc_middle::ty::adjustment::{ - Adjust, Adjustment, AllowTwoPhase, AutoBorrow, AutoBorrowMutability, -}; +use rustc_middle::ty::adjustment::{Adjust, Adjustment, AllowTwoPhase}; use rustc_middle::ty::Ty; use rustc_middle::ty::TypeFoldable; use rustc_middle::ty::{AdtKind, Visibility}; @@ -113,12 +111,21 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.check_expr_with_expectation(expr, ExpectHasType(expected)) } - pub(super) fn check_expr_with_expectation( + fn check_expr_with_expectation_and_needs( &self, expr: &'tcx hir::Expr<'tcx>, expected: Expectation<'tcx>, + needs: Needs, ) -> Ty<'tcx> { - self.check_expr_with_expectation_and_needs(expr, expected, Needs::None) + let ty = self.check_expr_with_expectation(expr, expected); + + // If the expression is used in a place whether mutable place is required + // e.g. LHS of assignment, perform the conversion. + if let Needs::MutPlace = needs { + self.convert_place_derefs_to_mutable(expr); + } + + ty } pub(super) fn check_expr(&self, expr: &'tcx hir::Expr<'tcx>) -> Ty<'tcx> { @@ -143,11 +150,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { /// Note that inspecting a type's structure *directly* may expose the fact /// that there are actually multiple representations for `Error`, so avoid /// that when err needs to be handled differently. - fn check_expr_with_expectation_and_needs( + pub(super) fn check_expr_with_expectation( &self, expr: &'tcx hir::Expr<'tcx>, expected: Expectation<'tcx>, - needs: Needs, ) -> Ty<'tcx> { debug!(">> type-checking: expr={:?} expected={:?}", expr, expected); @@ -171,7 +177,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let old_diverges = self.diverges.replace(Diverges::Maybe); let old_has_errors = self.has_errors.replace(false); - let ty = self.check_expr_kind(expr, expected, needs); + let ty = self.check_expr_kind(expr, expected); // Warn for non-block expressions with diverging children. 
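A user-level sketch (not part of the change; `demo` and `v` are made-up names) of when this second, mutable-place pass matters: the first pass resolves overloaded indexing to `Index`, and only expressions actually used as mutable places are re-resolved to `IndexMut`.

    fn demo(v: &mut Vec<i32>) {
        let _x = v[0];     // plain read: stays Index::index
        v[0] = 1;          // LHS of an assignment: re-resolved to IndexMut::index_mut
        v[0] += 2;         // LHS of a compound assignment: IndexMut as well
        let r = &mut v[0]; // mutable borrow of the place: IndexMut again
        *r = 3;
    }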
match expr.kind { @@ -213,9 +219,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { &self, expr: &'tcx hir::Expr<'tcx>, expected: Expectation<'tcx>, - needs: Needs, ) -> Ty<'tcx> { - debug!("check_expr_kind(expr={:?}, expected={:?}, needs={:?})", expr, expected, needs,); + debug!("check_expr_kind(expr={:?}, expected={:?})", expr, expected); let tcx = self.tcx; match expr.kind { @@ -226,9 +231,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.check_expr_assign(expr, expected, lhs, rhs, span) } ExprKind::AssignOp(op, ref lhs, ref rhs) => self.check_binop_assign(expr, op, lhs, rhs), - ExprKind::Unary(unop, ref oprnd) => { - self.check_expr_unary(unop, oprnd, expected, needs, expr) - } + ExprKind::Unary(unop, ref oprnd) => self.check_expr_unary(unop, oprnd, expected, expr), ExprKind::AddrOf(kind, mutbl, ref oprnd) => { self.check_expr_addr_of(kind, mutbl, oprnd, expected, expr) } @@ -264,7 +267,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ExprKind::Block(ref body, _) => self.check_block_with_expected(&body, expected), ExprKind::Call(ref callee, ref args) => self.check_call(expr, &callee, args, expected), ExprKind::MethodCall(ref segment, span, ref args, _) => { - self.check_method_call(expr, segment, span, args, expected, needs) + self.check_method_call(expr, segment, span, args, expected) } ExprKind::Cast(ref e, ref t) => self.check_expr_cast(e, t, expr), ExprKind::Type(ref e, ref t) => { @@ -281,8 +284,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ExprKind::Struct(ref qpath, fields, ref base_expr) => { self.check_expr_struct(expr, expected, qpath, fields, base_expr) } - ExprKind::Field(ref base, field) => self.check_field(expr, needs, &base, field), - ExprKind::Index(ref base, ref idx) => self.check_expr_index(base, idx, needs, expr), + ExprKind::Field(ref base, field) => self.check_field(expr, &base, field), + ExprKind::Index(ref base, ref idx) => self.check_expr_index(base, idx, expr), ExprKind::Yield(ref value, ref src) => self.check_expr_yield(value, expr, src), hir::ExprKind::Err => tcx.types.err, } @@ -302,7 +305,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { unop: hir::UnOp, oprnd: &'tcx hir::Expr<'tcx>, expected: Expectation<'tcx>, - needs: Needs, expr: &'tcx hir::Expr<'tcx>, ) -> Ty<'tcx> { let tcx = self.tcx; @@ -310,40 +312,14 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { hir::UnOp::UnNot | hir::UnOp::UnNeg => expected, hir::UnOp::UnDeref => NoExpectation, }; - let needs = match unop { - hir::UnOp::UnDeref => needs, - _ => Needs::None, - }; - let mut oprnd_t = self.check_expr_with_expectation_and_needs(&oprnd, expected_inner, needs); + let mut oprnd_t = self.check_expr_with_expectation(&oprnd, expected_inner); if !oprnd_t.references_error() { oprnd_t = self.structurally_resolved_type(expr.span, oprnd_t); match unop { hir::UnOp::UnDeref => { - if let Some(mt) = oprnd_t.builtin_deref(true) { - oprnd_t = mt.ty; - } else if let Some(ok) = self.try_overloaded_deref(expr.span, oprnd_t, needs) { - let method = self.register_infer_ok_obligations(ok); - if let ty::Ref(region, _, mutbl) = method.sig.inputs()[0].kind { - let mutbl = match mutbl { - hir::Mutability::Not => AutoBorrowMutability::Not, - hir::Mutability::Mut => AutoBorrowMutability::Mut { - // (It shouldn't actually matter for unary ops whether - // we enable two-phase borrows or not, since a unary - // op has no additional operands.) 
- allow_two_phase_borrow: AllowTwoPhase::No, - }, - }; - self.apply_adjustments( - oprnd, - vec![Adjustment { - kind: Adjust::Borrow(AutoBorrow::Ref(region, mutbl)), - target: method.sig.inputs()[0], - }], - ); - } - oprnd_t = self.make_overloaded_place_return_type(method).ty; - self.write_method_call(expr.hir_id, method); + if let Some(ty) = self.lookup_derefing(expr, oprnd, oprnd_t) { + oprnd_t = ty; } else { let mut err = type_error_struct!( tcx.sess, @@ -405,8 +381,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { _ => NoExpectation, } }); - let needs = Needs::maybe_mut_place(mutbl); - let ty = self.check_expr_with_expectation_and_needs(&oprnd, hint, needs); + let ty = + self.check_expr_with_expectation_and_needs(&oprnd, hint, Needs::maybe_mut_place(mutbl)); let tm = ty::TypeAndMut { ty, mutbl }; match kind { @@ -861,10 +837,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { span: Span, args: &'tcx [hir::Expr<'tcx>], expected: Expectation<'tcx>, - needs: Needs, ) -> Ty<'tcx> { let rcvr = &args[0]; - let rcvr_t = self.check_expr_with_needs(&rcvr, needs); + let rcvr_t = self.check_expr(&rcvr); // no need to check for bot/err -- callee does that let rcvr_t = self.structurally_resolved_type(args[0].span, rcvr_t); @@ -1443,11 +1418,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { fn check_field( &self, expr: &'tcx hir::Expr<'tcx>, - needs: Needs, base: &'tcx hir::Expr<'tcx>, field: Ident, ) -> Ty<'tcx> { - let expr_t = self.check_expr_with_needs(base, needs); + let expr_t = self.check_expr(base); let expr_t = self.structurally_resolved_type(base.span, expr_t); let mut private_candidate = None; let mut autoderef = self.autoderef(expr.span, expr_t); @@ -1467,7 +1441,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // of error recovery. self.write_field_index(expr.hir_id, index); if field.vis.is_accessible_from(def_scope, self.tcx) { - let adjustments = autoderef.adjust_steps(self, needs); + let adjustments = autoderef.adjust_steps(self); self.apply_adjustments(base, adjustments); autoderef.finalize(self); @@ -1482,7 +1456,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { if let Ok(index) = fstr.parse::() { if fstr == index.to_string() { if let Some(field_ty) = tys.get(index) { - let adjustments = autoderef.adjust_steps(self, needs); + let adjustments = autoderef.adjust_steps(self); self.apply_adjustments(base, adjustments); autoderef.finalize(self); @@ -1721,10 +1695,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { &self, base: &'tcx hir::Expr<'tcx>, idx: &'tcx hir::Expr<'tcx>, - needs: Needs, expr: &'tcx hir::Expr<'tcx>, ) -> Ty<'tcx> { - let base_t = self.check_expr_with_needs(&base, needs); + let base_t = self.check_expr(&base); let idx_t = self.check_expr(&idx); if base_t.references_error() { @@ -1733,7 +1706,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { idx_t } else { let base_t = self.structurally_resolved_type(base.span, base_t); - match self.lookup_indexing(expr, base, base_t, idx_t, needs) { + match self.lookup_indexing(expr, base, base_t, idx_t) { Some((index_ty, element_ty)) => { // two-phase not needed because index_ty is never mutable self.demand_coerce(idx, idx_t, index_ty, None, AllowTwoPhase::No); diff --git a/src/librustc_typeck/check/method/confirm.rs b/src/librustc_typeck/check/method/confirm.rs index c0f1f356ef372..21c359abf0b08 100644 --- a/src/librustc_typeck/check/method/confirm.rs +++ b/src/librustc_typeck/check/method/confirm.rs @@ -1,7 +1,7 @@ use super::{probe, MethodCallee}; use crate::astconv::AstConv; -use crate::check::{callee, FnCtxt, Needs}; +use crate::check::{callee, FnCtxt}; use crate::hir::def_id::DefId; use 
crate::hir::GenericArg; use rustc_hir as hir; @@ -145,7 +145,7 @@ impl<'a, 'tcx> ConfirmContext<'a, 'tcx> { }; assert_eq!(n, pick.autoderefs); - let mut adjustments = autoderef.adjust_steps(self, Needs::None); + let mut adjustments = autoderef.adjust_steps(self); let mut target = autoderef.unambiguous_final_ty(self); diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 82523f843aef9..b1d32213b729e 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -79,7 +79,7 @@ pub mod intrinsic; pub mod method; mod op; mod pat; -mod reconciliation; +mod place_op; mod regionck; mod upvar; mod wfcheck; @@ -115,7 +115,7 @@ use rustc_infer::infer::{InferCtxt, InferOk, InferResult, RegionVariableOrigin, use rustc_middle::hir::map::blocks::FnLikeNode; use rustc_middle::mir::interpret::ConstValue; use rustc_middle::ty::adjustment::{ - Adjust, Adjustment, AllowTwoPhase, AutoBorrow, AutoBorrowMutability, PointerCast, + Adjust, Adjustment, AllowTwoPhase, AutoBorrow, AutoBorrowMutability, }; use rustc_middle::ty::fold::{TypeFoldable, TypeFolder}; use rustc_middle::ty::query::Providers; @@ -156,7 +156,6 @@ use std::slice; use crate::require_c_abi_if_c_variadic; use crate::util::common::indenter; -use self::autoderef::Autoderef; use self::callee::DeferredCallResolution; use self::coercion::{CoerceMany, DynamicCoerceMany}; use self::compare_method::{compare_const_impl, compare_impl_method, compare_ty_impl}; @@ -3618,154 +3617,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { ret_ty.builtin_deref(true).unwrap() } - fn lookup_indexing( - &self, - expr: &hir::Expr<'_>, - base_expr: &'tcx hir::Expr<'tcx>, - base_ty: Ty<'tcx>, - idx_ty: Ty<'tcx>, - needs: Needs, - ) -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)> { - // FIXME(#18741) -- this is almost but not quite the same as the - // autoderef that normal method probing does. They could likely be - // consolidated. - - let mut autoderef = self.autoderef(base_expr.span, base_ty); - let mut result = None; - while result.is_none() && autoderef.next().is_some() { - result = self.try_index_step(expr, base_expr, &autoderef, needs, idx_ty); - } - autoderef.finalize(self); - result - } - - /// To type-check `base_expr[index_expr]`, we progressively autoderef - /// (and otherwise adjust) `base_expr`, looking for a type which either - /// supports builtin indexing or overloaded indexing. - /// This loop implements one step in that search; the autoderef loop - /// is implemented by `lookup_indexing`. - fn try_index_step( - &self, - expr: &hir::Expr<'_>, - base_expr: &hir::Expr<'_>, - autoderef: &Autoderef<'a, 'tcx>, - needs: Needs, - index_ty: Ty<'tcx>, - ) -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)> { - let adjusted_ty = autoderef.unambiguous_final_ty(self); - debug!( - "try_index_step(expr={:?}, base_expr={:?}, adjusted_ty={:?}, \ - index_ty={:?})", - expr, base_expr, adjusted_ty, index_ty - ); - - for &unsize in &[false, true] { - let mut self_ty = adjusted_ty; - if unsize { - // We only unsize arrays here. - if let ty::Array(element_ty, _) = adjusted_ty.kind { - self_ty = self.tcx.mk_slice(element_ty); - } else { - continue; - } - } - - // If some lookup succeeds, write callee into table and extract index/element - // type from the method signature. 
- // If some lookup succeeded, install method in table - let input_ty = self.next_ty_var(TypeVariableOrigin { - kind: TypeVariableOriginKind::AutoDeref, - span: base_expr.span, - }); - let method = self.try_overloaded_place_op( - expr.span, - self_ty, - &[input_ty], - needs, - PlaceOp::Index, - ); - - let result = method.map(|ok| { - debug!("try_index_step: success, using overloaded indexing"); - let method = self.register_infer_ok_obligations(ok); - - let mut adjustments = autoderef.adjust_steps(self, needs); - if let ty::Ref(region, _, r_mutbl) = method.sig.inputs()[0].kind { - let mutbl = match r_mutbl { - hir::Mutability::Not => AutoBorrowMutability::Not, - hir::Mutability::Mut => AutoBorrowMutability::Mut { - // Indexing can be desugared to a method call, - // so maybe we could use two-phase here. - // See the documentation of AllowTwoPhase for why that's - // not the case today. - allow_two_phase_borrow: AllowTwoPhase::No, - }, - }; - adjustments.push(Adjustment { - kind: Adjust::Borrow(AutoBorrow::Ref(region, mutbl)), - target: self - .tcx - .mk_ref(region, ty::TypeAndMut { mutbl: r_mutbl, ty: adjusted_ty }), - }); - } - if unsize { - adjustments.push(Adjustment { - kind: Adjust::Pointer(PointerCast::Unsize), - target: method.sig.inputs()[0], - }); - } - self.apply_adjustments(base_expr, adjustments); - - self.write_method_call(expr.hir_id, method); - (input_ty, self.make_overloaded_place_return_type(method).ty) - }); - if result.is_some() { - return result; - } - } - - None - } - - fn resolve_place_op(&self, op: PlaceOp, is_mut: bool) -> (Option, Ident) { - let (tr, name) = match (op, is_mut) { - (PlaceOp::Deref, false) => (self.tcx.lang_items().deref_trait(), sym::deref), - (PlaceOp::Deref, true) => (self.tcx.lang_items().deref_mut_trait(), sym::deref_mut), - (PlaceOp::Index, false) => (self.tcx.lang_items().index_trait(), sym::index), - (PlaceOp::Index, true) => (self.tcx.lang_items().index_mut_trait(), sym::index_mut), - }; - (tr, Ident::with_dummy_span(name)) - } - - fn try_overloaded_place_op( - &self, - span: Span, - base_ty: Ty<'tcx>, - arg_tys: &[Ty<'tcx>], - needs: Needs, - op: PlaceOp, - ) -> Option>> { - debug!("try_overloaded_place_op({:?},{:?},{:?},{:?})", span, base_ty, needs, op); - - // Try Mut first, if needed. - let (mut_tr, mut_op) = self.resolve_place_op(op, true); - let method = match (needs, mut_tr) { - (Needs::MutPlace, Some(trait_did)) => { - self.lookup_method_in_trait(span, mut_op, trait_did, base_ty, Some(arg_tys)) - } - _ => None, - }; - - // Otherwise, fall back to the immutable version. - let (imm_tr, imm_op) = self.resolve_place_op(op, false); - match (method, imm_tr) { - (None, Some(trait_did)) => { - self.lookup_method_in_trait(span, imm_op, trait_did, base_ty, Some(arg_tys)) - } - (method, _) => method, - } - } - fn check_method_argument_types( &self, sp: Span, diff --git a/src/librustc_typeck/check/op.rs b/src/librustc_typeck/check/op.rs index a3a27dc138be9..41088b0790813 100644 --- a/src/librustc_typeck/check/op.rs +++ b/src/librustc_typeck/check/op.rs @@ -1,7 +1,7 @@ //! Code related to processing overloaded binary and unary operators. use super::method::MethodCallee; -use super::{FnCtxt, Needs}; +use super::FnCtxt; use rustc_errors::{self, struct_span_err, Applicability, DiagnosticBuilder}; use rustc_hir as hir; use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; @@ -165,7 +165,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // trait matching creating lifetime constraints that are too strict. 
// e.g., adding `&'a T` and `&'b T`, given `&'x T: Add<&'x T>`, will result // in `&'a T <: &'x T` and `&'b T <: &'x T`, instead of `'a = 'b = 'x`. - let lhs_ty = self.check_expr_with_needs(lhs_expr, Needs::None); + let lhs_ty = self.check_expr(lhs_expr); let fresh_var = self.next_ty_var(TypeVariableOrigin { kind: TypeVariableOriginKind::MiscVariable, span: lhs_expr.span, @@ -177,7 +177,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // equivalence on the LHS of an assign-op like `+=`; // overwritten or mutably-borrowed places cannot be // coerced to a supertype. - self.check_expr_with_needs(lhs_expr, Needs::MutPlace) + self.check_expr(lhs_expr) } }; let lhs_ty = self.resolve_vars_with_obligations(lhs_ty); diff --git a/src/librustc_typeck/check/place_op.rs b/src/librustc_typeck/check/place_op.rs new file mode 100644 index 0000000000000..ce4b6f8baf917 --- /dev/null +++ b/src/librustc_typeck/check/place_op.rs @@ -0,0 +1,334 @@ +use crate::check::autoderef::Autoderef; +use crate::check::method::MethodCallee; +use crate::check::{FnCtxt, PlaceOp}; +use rustc_hir as hir; +use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind}; +use rustc_infer::infer::InferOk; +use rustc_middle::ty::adjustment::{Adjust, Adjustment, OverloadedDeref, PointerCast}; +use rustc_middle::ty::adjustment::{AllowTwoPhase, AutoBorrow, AutoBorrowMutability}; +use rustc_middle::ty::{self, Ty}; +use rustc_span::symbol::{sym, Ident}; +use rustc_span::Span; + +impl<'a, 'tcx> FnCtxt<'a, 'tcx> { + pub(super) fn lookup_derefing( + &self, + expr: &hir::Expr<'_>, + oprnd: &'tcx hir::Expr<'tcx>, + oprnd_ty: Ty<'tcx>, + ) -> Option> { + if let Some(mt) = oprnd_ty.builtin_deref(true) { + return Some(mt.ty); + } + + let ok = self.try_overloaded_deref(expr.span, oprnd_ty)?; + let method = self.register_infer_ok_obligations(ok); + if let ty::Ref(region, _, hir::Mutability::Not) = method.sig.inputs()[0].kind { + self.apply_adjustments( + oprnd, + vec![Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(region, AutoBorrowMutability::Not)), + target: method.sig.inputs()[0], + }], + ); + } else { + span_bug!(expr.span, "input to deref is not a ref?"); + } + let ty = self.make_overloaded_place_return_type(method).ty; + self.write_method_call(expr.hir_id, method); + Some(ty) + } + + pub(super) fn lookup_indexing( + &self, + expr: &hir::Expr<'_>, + base_expr: &'tcx hir::Expr<'tcx>, + base_ty: Ty<'tcx>, + idx_ty: Ty<'tcx>, + ) -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)> { + // FIXME(#18741) -- this is almost but not quite the same as the + // autoderef that normal method probing does. They could likely be + // consolidated. + + let mut autoderef = self.autoderef(base_expr.span, base_ty); + let mut result = None; + while result.is_none() && autoderef.next().is_some() { + result = self.try_index_step(expr, base_expr, &autoderef, idx_ty); + } + autoderef.finalize(self); + result + } + + /// To type-check `base_expr[index_expr]`, we progressively autoderef + /// (and otherwise adjust) `base_expr`, looking for a type which either + /// supports builtin indexing or overloaded indexing. + /// This loop implements one step in that search; the autoderef loop + /// is implemented by `lookup_indexing`. 
+ fn try_index_step( + &self, + expr: &hir::Expr<'_>, + base_expr: &hir::Expr<'_>, + autoderef: &Autoderef<'a, 'tcx>, + index_ty: Ty<'tcx>, + ) -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)> { + let adjusted_ty = autoderef.unambiguous_final_ty(self); + debug!( + "try_index_step(expr={:?}, base_expr={:?}, adjusted_ty={:?}, \ + index_ty={:?})", + expr, base_expr, adjusted_ty, index_ty + ); + + for &unsize in &[false, true] { + let mut self_ty = adjusted_ty; + if unsize { + // We only unsize arrays here. + if let ty::Array(element_ty, _) = adjusted_ty.kind { + self_ty = self.tcx.mk_slice(element_ty); + } else { + continue; + } + } + + // If some lookup succeeds, write callee into table and extract index/element + // type from the method signature. + // If some lookup succeeded, install method in table + let input_ty = self.next_ty_var(TypeVariableOrigin { + kind: TypeVariableOriginKind::AutoDeref, + span: base_expr.span, + }); + let method = + self.try_overloaded_place_op(expr.span, self_ty, &[input_ty], PlaceOp::Index); + + let result = method.map(|ok| { + debug!("try_index_step: success, using overloaded indexing"); + let method = self.register_infer_ok_obligations(ok); + + let mut adjustments = autoderef.adjust_steps(self); + if let ty::Ref(region, _, hir::Mutability::Not) = method.sig.inputs()[0].kind { + adjustments.push(Adjustment { + kind: Adjust::Borrow(AutoBorrow::Ref(region, AutoBorrowMutability::Not)), + target: self.tcx.mk_ref( + region, + ty::TypeAndMut { mutbl: hir::Mutability::Not, ty: adjusted_ty }, + ), + }); + } else { + span_bug!(expr.span, "input to index is not a ref?"); + } + if unsize { + adjustments.push(Adjustment { + kind: Adjust::Pointer(PointerCast::Unsize), + target: method.sig.inputs()[0], + }); + } + self.apply_adjustments(base_expr, adjustments); + + self.write_method_call(expr.hir_id, method); + (input_ty, self.make_overloaded_place_return_type(method).ty) + }); + if result.is_some() { + return result; + } + } + + None + } + + /// Try to resolve an overloaded place op. We only deal with the immutable + /// variant here (Deref/Index). In some contexts we would need the mutable + /// variant (DerefMut/IndexMut); those would be later converted by + /// `convert_place_derefs_to_mutable`. + pub(super) fn try_overloaded_place_op( + &self, + span: Span, + base_ty: Ty<'tcx>, + arg_tys: &[Ty<'tcx>], + op: PlaceOp, + ) -> Option>> { + debug!("try_overloaded_place_op({:?},{:?},{:?})", span, base_ty, op); + + let (imm_tr, imm_op) = match op { + PlaceOp::Deref => (self.tcx.lang_items().deref_trait(), sym::deref), + PlaceOp::Index => (self.tcx.lang_items().index_trait(), sym::index), + }; + imm_tr.and_then(|trait_did| { + self.lookup_method_in_trait( + span, + Ident::with_dummy_span(imm_op), + trait_did, + base_ty, + Some(arg_tys), + ) + }) + } + + fn try_mutable_overloaded_place_op( + &self, + span: Span, + base_ty: Ty<'tcx>, + arg_tys: &[Ty<'tcx>], + op: PlaceOp, + ) -> Option>> { + debug!("try_mutable_overloaded_place_op({:?},{:?},{:?})", span, base_ty, op); + + let (mut_tr, mut_op) = match op { + PlaceOp::Deref => (self.tcx.lang_items().deref_mut_trait(), sym::deref_mut), + PlaceOp::Index => (self.tcx.lang_items().index_mut_trait(), sym::index_mut), + }; + mut_tr.and_then(|trait_did| { + self.lookup_method_in_trait( + span, + Ident::with_dummy_span(mut_op), + trait_did, + base_ty, + Some(arg_tys), + ) + }) + } + + /// Convert auto-derefs, indices, etc of an expression from `Deref` and `Index` + /// into `DerefMut` and `IndexMut` respectively. 
+ /// + /// This is a second pass of typechecking derefs/indices. We need this we do not + /// always know whether a place needs to be mutable or not in the first pass. + /// This happens whether there is an implicit mutable reborrow, e.g. when the type + /// is used as the receiver of a method call. + pub fn convert_place_derefs_to_mutable(&self, expr: &hir::Expr<'_>) { + // Gather up expressions we want to munge. + let mut exprs = vec![expr]; + + loop { + match exprs.last().unwrap().kind { + hir::ExprKind::Field(ref expr, _) + | hir::ExprKind::Index(ref expr, _) + | hir::ExprKind::Unary(hir::UnOp::UnDeref, ref expr) => exprs.push(&expr), + _ => break, + } + } + + debug!("convert_place_derefs_to_mutable: exprs={:?}", exprs); + + // Fix up autoderefs and derefs. + for (i, &expr) in exprs.iter().rev().enumerate() { + debug!("convert_place_derefs_to_mutable: i={} expr={:?}", i, expr); + + // Fix up the autoderefs. Autorefs can only occur immediately preceding + // overloaded place ops, and will be fixed by them in order to get + // the correct region. + let mut source = self.node_ty(expr.hir_id); + // Do not mutate adjustments in place, but rather take them, + // and replace them after mutating them, to avoid having the + // tables borrowed during (`deref_mut`) method resolution. + let previous_adjustments = + self.tables.borrow_mut().adjustments_mut().remove(expr.hir_id); + if let Some(mut adjustments) = previous_adjustments { + for adjustment in &mut adjustments { + if let Adjust::Deref(Some(ref mut deref)) = adjustment.kind { + if let Some(ok) = self.try_mutable_overloaded_place_op( + expr.span, + source, + &[], + PlaceOp::Deref, + ) { + let method = self.register_infer_ok_obligations(ok); + if let ty::Ref(region, _, mutbl) = method.sig.output().kind { + *deref = OverloadedDeref { region, mutbl }; + } + } + } + source = adjustment.target; + } + self.tables.borrow_mut().adjustments_mut().insert(expr.hir_id, adjustments); + } + + match expr.kind { + hir::ExprKind::Index(ref base_expr, ref index_expr) => { + // We need to get the final type in case dereferences were needed for the trait + // to apply (#72002). + let index_expr_ty = self.tables.borrow().expr_ty_adjusted(index_expr); + self.convert_place_op_to_mutable( + PlaceOp::Index, + expr, + base_expr, + &[index_expr_ty], + ); + } + hir::ExprKind::Unary(hir::UnOp::UnDeref, ref base_expr) => { + self.convert_place_op_to_mutable(PlaceOp::Deref, expr, base_expr, &[]); + } + _ => {} + } + } + } + + fn convert_place_op_to_mutable( + &self, + op: PlaceOp, + expr: &hir::Expr<'_>, + base_expr: &hir::Expr<'_>, + arg_tys: &[Ty<'tcx>], + ) { + debug!("convert_place_op_to_mutable({:?}, {:?}, {:?}, {:?})", op, expr, base_expr, arg_tys); + if !self.tables.borrow().is_method_call(expr) { + debug!("convert_place_op_to_mutable - builtin, nothing to do"); + return; + } + + // Need to deref because overloaded place ops take self by-reference. + let base_ty = self + .tables + .borrow() + .expr_ty_adjusted(base_expr) + .builtin_deref(false) + .expect("place op takes something that is not a ref") + .ty; + + let method = self.try_mutable_overloaded_place_op(expr.span, base_ty, arg_tys, op); + let method = match method { + Some(ok) => self.register_infer_ok_obligations(ok), + // Couldn't find the mutable variant of the place op, keep the + // current, immutable version. 
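To make the "implicit mutable reborrow" case described above concrete, here is a self-contained sketch (not part of the change; `Wrapper` and `demo` are invented for illustration) where a deref resolved to `Deref` in the first pass has to be upgraded to `DerefMut`:

    use std::ops::{Deref, DerefMut};

    struct Wrapper(String);

    impl Deref for Wrapper {
        type Target = String;
        fn deref(&self) -> &String { &self.0 }
    }

    impl DerefMut for Wrapper {
        fn deref_mut(&mut self) -> &mut String { &mut self.0 }
    }

    fn demo(w: &mut Wrapper) {
        let _n = w.len(); // autoderef via Deref suffices for a by-ref receiver
        w.push('!');      // receiver needs &mut String: the deref becomes DerefMut
        (*w).clear();     // explicit deref used as a mutable place: DerefMut again
    }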
+ None => return, + }; + debug!("convert_place_op_to_mutable: method={:?}", method); + self.write_method_call(expr.hir_id, method); + + let region = if let ty::Ref(r, _, hir::Mutability::Mut) = method.sig.inputs()[0].kind { + r + } else { + span_bug!(expr.span, "input to mutable place op is not a mut ref?"); + }; + + // Convert the autoref in the base expr to mutable with the correct + // region and mutability. + let base_expr_ty = self.node_ty(base_expr.hir_id); + if let Some(adjustments) = + self.tables.borrow_mut().adjustments_mut().get_mut(base_expr.hir_id) + { + let mut source = base_expr_ty; + for adjustment in &mut adjustments[..] { + if let Adjust::Borrow(AutoBorrow::Ref(..)) = adjustment.kind { + debug!("convert_place_op_to_mutable: converting autoref {:?}", adjustment); + let mutbl = AutoBorrowMutability::Mut { + // Deref/indexing can be desugared to a method call, + // so maybe we could use two-phase here. + // See the documentation of AllowTwoPhase for why that's + // not the case today. + allow_two_phase_borrow: AllowTwoPhase::No, + }; + adjustment.kind = Adjust::Borrow(AutoBorrow::Ref(region, mutbl)); + adjustment.target = + self.tcx.mk_ref(region, ty::TypeAndMut { ty: source, mutbl: mutbl.into() }); + } + source = adjustment.target; + } + + // If we have an autoref followed by unsizing at the end, fix the unsize target. + if let [.., Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(..)), .. }, Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), ref mut target }] = + adjustments[..] + { + *target = method.sig.inputs()[0]; + } + } + } +} diff --git a/src/librustc_typeck/check/reconciliation.rs b/src/librustc_typeck/check/reconciliation.rs deleted file mode 100644 index 0a4293140a82a..0000000000000 --- a/src/librustc_typeck/check/reconciliation.rs +++ /dev/null @@ -1,150 +0,0 @@ -use crate::check::{FnCtxt, Needs, PlaceOp}; -use rustc_hir as hir; -use rustc_middle::ty::adjustment::{Adjust, Adjustment, OverloadedDeref, PointerCast}; -use rustc_middle::ty::adjustment::{AllowTwoPhase, AutoBorrow, AutoBorrowMutability}; -use rustc_middle::ty::{self, Ty}; - -impl<'a, 'tcx> FnCtxt<'a, 'tcx> { - /// Convert auto-derefs, indices, etc of an expression from `Deref` and `Index` - /// into `DerefMut` and `IndexMut` respectively. - /// - /// This is a second pass of typechecking derefs/indices. We need this we do not - /// always know whether a place needs to be mutable or not in the first pass. - /// This happens whether there is an implicit mutable reborrow, e.g. when the type - /// is used as the receiver of a method call. - pub fn convert_place_derefs_to_mutable(&self, expr: &hir::Expr<'_>) { - // Gather up expressions we want to munge. - let mut exprs = vec![expr]; - - loop { - match exprs.last().unwrap().kind { - hir::ExprKind::Field(ref expr, _) - | hir::ExprKind::Index(ref expr, _) - | hir::ExprKind::Unary(hir::UnOp::UnDeref, ref expr) => exprs.push(&expr), - _ => break, - } - } - - debug!("convert_place_derefs_to_mutable: exprs={:?}", exprs); - - // Fix up autoderefs and derefs. - for (i, &expr) in exprs.iter().rev().enumerate() { - debug!("convert_place_derefs_to_mutable: i={} expr={:?}", i, expr); - - // Fix up the autoderefs. Autorefs can only occur immediately preceding - // overloaded place ops, and will be fixed by them in order to get - // the correct region. 
- let mut source = self.node_ty(expr.hir_id); - // Do not mutate adjustments in place, but rather take them, - // and replace them after mutating them, to avoid having the - // tables borrowed during (`deref_mut`) method resolution. - let previous_adjustments = - self.tables.borrow_mut().adjustments_mut().remove(expr.hir_id); - if let Some(mut adjustments) = previous_adjustments { - let needs = Needs::MutPlace; - for adjustment in &mut adjustments { - if let Adjust::Deref(Some(ref mut deref)) = adjustment.kind { - if let Some(ok) = self.try_overloaded_deref(expr.span, source, needs) { - let method = self.register_infer_ok_obligations(ok); - if let ty::Ref(region, _, mutbl) = method.sig.output().kind { - *deref = OverloadedDeref { region, mutbl }; - } - } - } - source = adjustment.target; - } - self.tables.borrow_mut().adjustments_mut().insert(expr.hir_id, adjustments); - } - - match expr.kind { - hir::ExprKind::Index(ref base_expr, ref index_expr) => { - // We need to get the final type in case dereferences were needed for the trait - // to apply (#72002). - let index_expr_ty = self.tables.borrow().expr_ty_adjusted(index_expr); - self.convert_place_op_to_mutable( - PlaceOp::Index, - expr, - base_expr, - &[index_expr_ty], - ); - } - hir::ExprKind::Unary(hir::UnOp::UnDeref, ref base_expr) => { - self.convert_place_op_to_mutable(PlaceOp::Deref, expr, base_expr, &[]); - } - _ => {} - } - } - } - - fn convert_place_op_to_mutable( - &self, - op: PlaceOp, - expr: &hir::Expr<'_>, - base_expr: &hir::Expr<'_>, - arg_tys: &[Ty<'tcx>], - ) { - debug!("convert_place_op_to_mutable({:?}, {:?}, {:?}, {:?})", op, expr, base_expr, arg_tys); - if !self.tables.borrow().is_method_call(expr) { - debug!("convert_place_op_to_mutable - builtin, nothing to do"); - return; - } - - // Need to deref because overloaded place ops take self by-reference. - let base_ty = self - .tables - .borrow() - .expr_ty_adjusted(base_expr) - .builtin_deref(false) - .expect("place op takes something that is not a ref") - .ty; - - let method = self.try_overloaded_place_op(expr.span, base_ty, arg_tys, Needs::MutPlace, op); - let method = match method { - Some(ok) => self.register_infer_ok_obligations(ok), - None => return self.tcx.sess.delay_span_bug(expr.span, "re-trying op failed"), - }; - debug!("convert_place_op_to_mutable: method={:?}", method); - self.write_method_call(expr.hir_id, method); - - let (region, mutbl) = if let ty::Ref(r, _, mutbl) = method.sig.inputs()[0].kind { - (r, mutbl) - } else { - span_bug!(expr.span, "input to place op is not a ref?"); - }; - - // Convert the autoref in the base expr to mutable with the correct - // region and mutability. - let base_expr_ty = self.node_ty(base_expr.hir_id); - if let Some(adjustments) = - self.tables.borrow_mut().adjustments_mut().get_mut(base_expr.hir_id) - { - let mut source = base_expr_ty; - for adjustment in &mut adjustments[..] { - if let Adjust::Borrow(AutoBorrow::Ref(..)) = adjustment.kind { - debug!("convert_place_op_to_mutable: converting autoref {:?}", adjustment); - let mutbl = match mutbl { - hir::Mutability::Not => AutoBorrowMutability::Not, - hir::Mutability::Mut => AutoBorrowMutability::Mut { - // For initial two-phase borrow - // deployment, conservatively omit - // overloaded operators. 
- allow_two_phase_borrow: AllowTwoPhase::No, - }, - }; - adjustment.kind = Adjust::Borrow(AutoBorrow::Ref(region, mutbl)); - adjustment.target = - self.tcx.mk_ref(region, ty::TypeAndMut { ty: source, mutbl: mutbl.into() }); - } - source = adjustment.target; - } - - // If we have an autoref followed by unsizing at the end, fix the unsize target. - - if let [.., Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(..)), .. }, Adjustment { kind: Adjust::Pointer(PointerCast::Unsize), ref mut target }] = - adjustments[..] - { - *target = method.sig.inputs()[0]; - } - } - } -} From 5068ae1ca05b2be0c2a98206a58d894aa620b312 Mon Sep 17 00:00:00 2001 From: Rich Kadel Date: Wed, 3 Jun 2020 21:19:34 -0700 Subject: [PATCH 15/34] [WIP] injects llvm intrinsic instrprof.increment for coverage reports This initial version only injects counters at the top of each function. Rust Coverage will require injecting additional counters at each conditional code branch. --- src/libcore/intrinsics.rs | 9 + src/librustc_codegen_llvm/builder.rs | 27 ++ src/librustc_codegen_llvm/context.rs | 2 + src/librustc_codegen_llvm/intrinsic.rs | 21 + src/librustc_codegen_llvm/llvm/ffi.rs | 1 + src/librustc_codegen_ssa/back/write.rs | 6 + src/librustc_codegen_ssa/mir/block.rs | 6 +- src/librustc_codegen_ssa/traits/builder.rs | 8 + src/librustc_codegen_ssa/traits/intrinsic.rs | 1 + src/librustc_hir/lang_items.rs | 2 + src/librustc_interface/tests.rs | 1 + src/librustc_middle/mir/mono.rs | 1 + src/librustc_middle/ty/instance.rs | 7 + src/librustc_middle/ty/mod.rs | 1 + src/librustc_middle/ty/structural_impls.rs | 11 +- src/librustc_mir/interpret/terminator.rs | 3 + src/librustc_mir/monomorphize/collector.rs | 5 +- src/librustc_mir/monomorphize/partitioning.rs | 2 + src/librustc_mir/shim.rs | 3 + .../transform/instrument_coverage.rs | 100 +++++ src/librustc_mir/transform/mod.rs | 3 + src/librustc_session/options.rs | 3 + src/librustc_span/symbol.rs | 1 + src/librustc_ty/instance.rs | 4 + src/rustllvm/RustWrapper.cpp | 6 + .../codegen/coverage-experiments/Cargo.lock | 5 + .../codegen/coverage-experiments/Cargo.toml | 103 +++++ .../README-THIS-IS-TEMPORARY.md | 157 ++++++++ .../src/coverage_injection_test.rs | 335 ++++++++++++++++ .../src/coverage_injection_test2.rs | 320 ++++++++++++++++ .../src/coverage_injection_test_alt.rs | 362 ++++++++++++++++++ .../coverage-experiments/src/drop_trait.rs | 25 ++ .../src/drop_trait_with_comments_prints.rs | 53 +++ .../codegen/coverage-experiments/src/for.rs | 41 ++ .../src/for_with_comments.rs | 24 ++ .../codegen/coverage-experiments/src/if.rs | 80 ++++ .../src/if_with_comments.rs | 39 ++ .../src/increment_intrinsic.rs | 11 + .../coverage-experiments/src/just_main.rs | 3 + .../coverage-experiments/src/lazy_boolean.rs | 17 + .../src/loop_break_value.rs | 15 + .../codegen/coverage-experiments/src/match.rs | 22 ++ .../src/match_with_increment.rs | 305 +++++++++++++++ .../src/match_with_increment_alt.rs | 296 ++++++++++++++ .../src/match_without_increment.mir | 0 .../src/match_without_increment.rs | 5 + .../src/match_without_increment_alt.mir | 0 ..._mark_err_status_handling_with_comments.rs | 24 ++ .../codegen/coverage-experiments/src/while.rs | 23 ++ .../coverage-experiments/src/while_clean.rs | 6 + .../src/while_early_return.rs | 10 + .../src/while_with_comments.rs | 51 +++ 52 files changed, 2561 insertions(+), 5 deletions(-) create mode 100644 src/librustc_mir/transform/instrument_coverage.rs create mode 100644 src/test/codegen/coverage-experiments/Cargo.lock create mode 100644 
src/test/codegen/coverage-experiments/Cargo.toml create mode 100644 src/test/codegen/coverage-experiments/README-THIS-IS-TEMPORARY.md create mode 100644 src/test/codegen/coverage-experiments/src/coverage_injection_test.rs create mode 100644 src/test/codegen/coverage-experiments/src/coverage_injection_test2.rs create mode 100644 src/test/codegen/coverage-experiments/src/coverage_injection_test_alt.rs create mode 100644 src/test/codegen/coverage-experiments/src/drop_trait.rs create mode 100644 src/test/codegen/coverage-experiments/src/drop_trait_with_comments_prints.rs create mode 100644 src/test/codegen/coverage-experiments/src/for.rs create mode 100644 src/test/codegen/coverage-experiments/src/for_with_comments.rs create mode 100644 src/test/codegen/coverage-experiments/src/if.rs create mode 100644 src/test/codegen/coverage-experiments/src/if_with_comments.rs create mode 100644 src/test/codegen/coverage-experiments/src/increment_intrinsic.rs create mode 100644 src/test/codegen/coverage-experiments/src/just_main.rs create mode 100644 src/test/codegen/coverage-experiments/src/lazy_boolean.rs create mode 100644 src/test/codegen/coverage-experiments/src/loop_break_value.rs create mode 100644 src/test/codegen/coverage-experiments/src/match.rs create mode 100644 src/test/codegen/coverage-experiments/src/match_with_increment.rs create mode 100644 src/test/codegen/coverage-experiments/src/match_with_increment_alt.rs create mode 100644 src/test/codegen/coverage-experiments/src/match_without_increment.mir create mode 100644 src/test/codegen/coverage-experiments/src/match_without_increment.rs create mode 100644 src/test/codegen/coverage-experiments/src/match_without_increment_alt.mir create mode 100644 src/test/codegen/coverage-experiments/src/question_mark_err_status_handling_with_comments.rs create mode 100644 src/test/codegen/coverage-experiments/src/while.rs create mode 100644 src/test/codegen/coverage-experiments/src/while_clean.rs create mode 100644 src/test/codegen/coverage-experiments/src/while_early_return.rs create mode 100644 src/test/codegen/coverage-experiments/src/while_with_comments.rs diff --git a/src/libcore/intrinsics.rs b/src/libcore/intrinsics.rs index 85076a573b528..abb35e838ea28 100644 --- a/src/libcore/intrinsics.rs +++ b/src/libcore/intrinsics.rs @@ -1943,6 +1943,15 @@ extern "rust-intrinsic" { pub fn miri_start_panic(payload: *mut u8) -> !; } +#[cfg(not(bootstrap))] +#[cfg_attr(not(bootstrap), lang = "count_code_region")] +pub fn count_code_region(_index: u32) { + #[cfg_attr(not(bootstrap), allow(unused_unsafe))] // remove `unsafe` on bootstrap bump + unsafe { + abort() + } +} + // Some functions are defined here because they accidentally got made // available in this module on stable. See . 
// (`transmute` also falls into this category, but it cannot be wrapped due to the diff --git a/src/librustc_codegen_llvm/builder.rs b/src/librustc_codegen_llvm/builder.rs index f5ae9824df894..ba285b5ef38d1 100644 --- a/src/librustc_codegen_llvm/builder.rs +++ b/src/librustc_codegen_llvm/builder.rs @@ -997,6 +997,33 @@ impl BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { self.call_lifetime_intrinsic("llvm.lifetime.end.p0i8", ptr, size); } + fn instrprof_increment( + &mut self, + fn_name: &'ll Value, + hash: &'ll Value, + num_counters: &'ll Value, + index: &'ll Value, + ) -> &'ll Value { + debug!( + "instrprof_increment() with args ({:?}, {:?}, {:?}, {:?})", + fn_name, hash, num_counters, index + ); + + let llfn = unsafe { llvm::LLVMRustGetInstrprofIncrementIntrinsic(self.cx().llmod) }; + let args = &[fn_name, hash, num_counters, index]; + let args = self.check_call("call", llfn, args); + + unsafe { + llvm::LLVMRustBuildCall( + self.llbuilder, + llfn, + args.as_ptr() as *const &llvm::Value, + args.len() as c_uint, + None, + ) + } + } + fn call( &mut self, llfn: &'ll Value, diff --git a/src/librustc_codegen_llvm/context.rs b/src/librustc_codegen_llvm/context.rs index 4c810a37d4180..7ff5ac5cbdc10 100644 --- a/src/librustc_codegen_llvm/context.rs +++ b/src/librustc_codegen_llvm/context.rs @@ -749,6 +749,8 @@ impl CodegenCx<'b, 'tcx> { ifn!("llvm.lifetime.start.p0i8", fn(t_i64, i8p) -> void); ifn!("llvm.lifetime.end.p0i8", fn(t_i64, i8p) -> void); + ifn!("llvm.instrprof.increment", fn(i8p, t_i64, t_i32, t_i32) -> void); + ifn!("llvm.expect.i1", fn(i1, i1) -> i1); ifn!("llvm.eh.typeid.for", fn(i8p) -> t_i32); ifn!("llvm.localescape", fn(...) -> void); diff --git a/src/librustc_codegen_llvm/intrinsic.rs b/src/librustc_codegen_llvm/intrinsic.rs index 1e6d2e3dbb74e..7fddda99185b4 100644 --- a/src/librustc_codegen_llvm/intrinsic.rs +++ b/src/librustc_codegen_llvm/intrinsic.rs @@ -7,6 +7,8 @@ use crate::type_of::LayoutLlvmExt; use crate::va_arg::emit_va_arg; use crate::value::Value; +use log::debug; + use rustc_ast::ast; use rustc_codegen_ssa::base::{compare_simd_types, to_immediate, wants_msvc_seh}; use rustc_codegen_ssa::common::span_invalid_monomorphization_error; @@ -21,6 +23,7 @@ use rustc_middle::ty::layout::{FnAbiExt, HasTyCtxt}; use rustc_middle::ty::{self, Ty}; use rustc_middle::{bug, span_bug}; use rustc_span::Span; +use rustc_span::Symbol; use rustc_target::abi::{self, HasDataLayout, LayoutOf, Primitive}; use rustc_target::spec::PanicStrategy; @@ -86,6 +89,7 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> { args: &[OperandRef<'tcx, &'ll Value>], llresult: &'ll Value, span: Span, + caller_instance: ty::Instance<'tcx>, ) { let tcx = self.tcx; let callee_ty = instance.monomorphic_ty(tcx); @@ -136,6 +140,23 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> { let llfn = self.get_intrinsic(&("llvm.debugtrap")); self.call(llfn, &[], None) } + "count_code_region" => { + if let ty::InstanceDef::Item(fn_def_id) = caller_instance.def { + let caller_fn_path = tcx.def_path_str(fn_def_id); + debug!( + "count_code_region to llvm.instrprof.increment(fn_name={})", + caller_fn_path + ); + + let (fn_name, _len_val) = self.const_str(Symbol::intern(&caller_fn_path)); + let index = args[0].immediate(); + let hash = self.const_u64(1234); + let num_counters = self.const_u32(1); + self.instrprof_increment(fn_name, hash, num_counters, index) + } else { + bug!("intrinsic count_code_region: no src.instance"); + } + } "va_start" => self.va_start(args[0].immediate()), "va_end" => 
self.va_end(args[0].immediate()), "va_copy" => { diff --git a/src/librustc_codegen_llvm/llvm/ffi.rs b/src/librustc_codegen_llvm/llvm/ffi.rs index 54cf99e1c6d6c..372fb17573a4b 100644 --- a/src/librustc_codegen_llvm/llvm/ffi.rs +++ b/src/librustc_codegen_llvm/llvm/ffi.rs @@ -1360,6 +1360,7 @@ extern "C" { // Miscellaneous instructions pub fn LLVMBuildPhi(B: &Builder<'a>, Ty: &'a Type, Name: *const c_char) -> &'a Value; + pub fn LLVMRustGetInstrprofIncrementIntrinsic(M: &Module) -> &'a Value; pub fn LLVMRustBuildCall( B: &Builder<'a>, Fn: &'a Value, diff --git a/src/librustc_codegen_ssa/back/write.rs b/src/librustc_codegen_ssa/back/write.rs index c118e5ebdb72d..49054765b9dae 100644 --- a/src/librustc_codegen_ssa/back/write.rs +++ b/src/librustc_codegen_ssa/back/write.rs @@ -175,6 +175,12 @@ impl ModuleConfig { if sess.opts.debugging_opts.profile && !is_compiler_builtins { passes.push("insert-gcov-profiling".to_owned()); } + + // The rustc option `-Zinstrument_coverage` injects intrinsic calls to + // `llvm.instrprof.increment()`, which requires the LLVM `instrprof` pass. + if sess.opts.debugging_opts.instrument_coverage { + passes.push("instrprof".to_owned()); + } passes }, vec![] diff --git a/src/librustc_codegen_ssa/mir/block.rs b/src/librustc_codegen_ssa/mir/block.rs index ef59ad486eefe..d7db657154993 100644 --- a/src/librustc_codegen_ssa/mir/block.rs +++ b/src/librustc_codegen_ssa/mir/block.rs @@ -566,7 +566,10 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { // Handle intrinsics old codegen wants Expr's for, ourselves. let intrinsic = match def { - Some(ty::InstanceDef::Intrinsic(def_id)) => Some(bx.tcx().item_name(def_id).as_str()), + Some(ty::InstanceDef::Intrinsic(def_id)) + | Some(ty::InstanceDef::InjectedCode(def_id)) => { + Some(bx.tcx().item_name(def_id).as_str()) + } _ => None, }; let intrinsic = intrinsic.as_ref().map(|s| &s[..]); @@ -693,6 +696,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { &args, dest, terminator.source_info.span, + self.instance, ); if let ReturnDest::IndirectOperand(dst, _) = ret_dest { diff --git a/src/librustc_codegen_ssa/traits/builder.rs b/src/librustc_codegen_ssa/traits/builder.rs index caba7ebef593b..7ffc9f15bffdc 100644 --- a/src/librustc_codegen_ssa/traits/builder.rs +++ b/src/librustc_codegen_ssa/traits/builder.rs @@ -260,6 +260,14 @@ pub trait BuilderMethods<'a, 'tcx>: /// Called for `StorageDead` fn lifetime_end(&mut self, ptr: Self::Value, size: Size); + fn instrprof_increment( + &mut self, + fn_name: Self::Value, + hash: Self::Value, + num_counters: Self::Value, + index: Self::Value, + ) -> Self::Value; + fn call( &mut self, llfn: Self::Value, diff --git a/src/librustc_codegen_ssa/traits/intrinsic.rs b/src/librustc_codegen_ssa/traits/intrinsic.rs index 9d48e233de655..f62019498511c 100644 --- a/src/librustc_codegen_ssa/traits/intrinsic.rs +++ b/src/librustc_codegen_ssa/traits/intrinsic.rs @@ -15,6 +15,7 @@ pub trait IntrinsicCallMethods<'tcx>: BackendTypes { args: &[OperandRef<'tcx, Self::Value>], llresult: Self::Value, span: Span, + caller_instance: ty::Instance<'tcx>, ); fn abort(&mut self); diff --git a/src/librustc_hir/lang_items.rs b/src/librustc_hir/lang_items.rs index 83bada4041963..091ded6d74d0f 100644 --- a/src/librustc_hir/lang_items.rs +++ b/src/librustc_hir/lang_items.rs @@ -242,6 +242,8 @@ language_item_table! 
{ StartFnLangItem, "start", start_fn, Target::Fn; + CountCodeRegionFnLangItem, "count_code_region", count_code_region_fn, Target::Fn; + EhPersonalityLangItem, "eh_personality", eh_personality, Target::Fn; EhCatchTypeinfoLangItem, "eh_catch_typeinfo", eh_catch_typeinfo, Target::Static; diff --git a/src/librustc_interface/tests.rs b/src/librustc_interface/tests.rs index 87647f3b0b017..c2a7d1a4a6102 100644 --- a/src/librustc_interface/tests.rs +++ b/src/librustc_interface/tests.rs @@ -548,6 +548,7 @@ fn test_debugging_options_tracking_hash() { tracked!(human_readable_cgu_names, true); tracked!(inline_in_all_cgus, Some(true)); tracked!(insert_sideeffect, true); + tracked!(instrument_coverage, true); tracked!(instrument_mcount, true); tracked!(link_only, true); tracked!(merge_functions, Some(MergeFunctions::Disabled)); diff --git a/src/librustc_middle/mir/mono.rs b/src/librustc_middle/mir/mono.rs index c889dbc0a4498..b2c00849d9f83 100644 --- a/src/librustc_middle/mir/mono.rs +++ b/src/librustc_middle/mir/mono.rs @@ -352,6 +352,7 @@ impl<'tcx> CodegenUnit<'tcx> { InstanceDef::VtableShim(..) | InstanceDef::ReifyShim(..) | InstanceDef::Intrinsic(..) + | InstanceDef::InjectedCode(..) | InstanceDef::FnPtrShim(..) | InstanceDef::Virtual(..) | InstanceDef::ClosureOnceShim { .. } diff --git a/src/librustc_middle/ty/instance.rs b/src/librustc_middle/ty/instance.rs index 1ce079821a22e..4f88e64c5039a 100644 --- a/src/librustc_middle/ty/instance.rs +++ b/src/librustc_middle/ty/instance.rs @@ -21,6 +21,10 @@ pub enum InstanceDef<'tcx> { Item(DefId), Intrinsic(DefId), + /// Injected call to a placeholder function that is replaced with + /// For example: `core::intrinsic::count_code_region()` for code coverage. + InjectedCode(DefId), + /// `::method` where `method` receives unsizeable `self: Self`. VtableShim(DefId), @@ -149,6 +153,7 @@ impl<'tcx> InstanceDef<'tcx> { | InstanceDef::FnPtrShim(def_id, _) | InstanceDef::Virtual(def_id, _) | InstanceDef::Intrinsic(def_id) + | InstanceDef::InjectedCode(def_id) | InstanceDef::ClosureOnceShim { call_once: def_id } | InstanceDef::DropGlue(def_id, _) | InstanceDef::CloneShim(def_id, _) => def_id, @@ -236,6 +241,7 @@ impl<'tcx> fmt::Display for Instance<'tcx> { InstanceDef::VtableShim(_) => write!(f, " - shim(vtable)"), InstanceDef::ReifyShim(_) => write!(f, " - shim(reify)"), InstanceDef::Intrinsic(_) => write!(f, " - intrinsic"), + InstanceDef::InjectedCode(_) => write!(f, " - injected-code"), InstanceDef::Virtual(_, num) => write!(f, " - virtual#{}", num), InstanceDef::FnPtrShim(_, ty) => write!(f, " - shim({:?})", ty), InstanceDef::ClosureOnceShim { .. } => write!(f, " - shim"), @@ -415,6 +421,7 @@ impl<'tcx> Instance<'tcx> { | InstanceDef::FnPtrShim(..) | InstanceDef::Item(_) | InstanceDef::Intrinsic(..) + | InstanceDef::InjectedCode(..) | InstanceDef::ReifyShim(..) | InstanceDef::Virtual(..) | InstanceDef::VtableShim(..) => Some(self.substs), diff --git a/src/librustc_middle/ty/mod.rs b/src/librustc_middle/ty/mod.rs index 93ef73171993c..9b1e717731e82 100644 --- a/src/librustc_middle/ty/mod.rs +++ b/src/librustc_middle/ty/mod.rs @@ -2717,6 +2717,7 @@ impl<'tcx> TyCtxt<'tcx> { ty::InstanceDef::VtableShim(..) | ty::InstanceDef::ReifyShim(..) | ty::InstanceDef::Intrinsic(..) + | ty::InstanceDef::InjectedCode(..) | ty::InstanceDef::FnPtrShim(..) | ty::InstanceDef::Virtual(..) | ty::InstanceDef::ClosureOnceShim { .. 
} diff --git a/src/librustc_middle/ty/structural_impls.rs b/src/librustc_middle/ty/structural_impls.rs index f6f5dfd651612..b6cbd2082a518 100644 --- a/src/librustc_middle/ty/structural_impls.rs +++ b/src/librustc_middle/ty/structural_impls.rs @@ -674,6 +674,7 @@ impl<'a, 'tcx> Lift<'tcx> for ty::InstanceDef<'a> { ty::InstanceDef::VtableShim(def_id) => Some(ty::InstanceDef::VtableShim(def_id)), ty::InstanceDef::ReifyShim(def_id) => Some(ty::InstanceDef::ReifyShim(def_id)), ty::InstanceDef::Intrinsic(def_id) => Some(ty::InstanceDef::Intrinsic(def_id)), + ty::InstanceDef::InjectedCode(def_id) => Some(ty::InstanceDef::Intrinsic(def_id)), ty::InstanceDef::FnPtrShim(def_id, ref ty) => { Some(ty::InstanceDef::FnPtrShim(def_id, tcx.lift(ty)?)) } @@ -846,6 +847,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::instance::Instance<'tcx> { VtableShim(did) => VtableShim(did.fold_with(folder)), ReifyShim(did) => ReifyShim(did.fold_with(folder)), Intrinsic(did) => Intrinsic(did.fold_with(folder)), + InjectedCode(did) => InjectedCode(did.fold_with(folder)), FnPtrShim(did, ty) => FnPtrShim(did.fold_with(folder), ty.fold_with(folder)), Virtual(did, i) => Virtual(did.fold_with(folder), i), ClosureOnceShim { call_once } => { @@ -861,9 +863,12 @@ impl<'tcx> TypeFoldable<'tcx> for ty::instance::Instance<'tcx> { use crate::ty::InstanceDef::*; self.substs.visit_with(visitor) || match self.def { - Item(did) | VtableShim(did) | ReifyShim(did) | Intrinsic(did) | Virtual(did, _) => { - did.visit_with(visitor) - } + Item(did) + | VtableShim(did) + | ReifyShim(did) + | Intrinsic(did) + | InjectedCode(did) + | Virtual(did, _) => did.visit_with(visitor), FnPtrShim(did, ty) | CloneShim(did, ty) => { did.visit_with(visitor) || ty.visit_with(visitor) } diff --git a/src/librustc_mir/interpret/terminator.rs b/src/librustc_mir/interpret/terminator.rs index cd7621ea9752b..82fa471b54d73 100644 --- a/src/librustc_mir/interpret/terminator.rs +++ b/src/librustc_mir/interpret/terminator.rs @@ -257,6 +257,9 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { assert!(caller_abi == Abi::RustIntrinsic || caller_abi == Abi::PlatformIntrinsic); M::call_intrinsic(self, instance, args, ret, unwind) } + ty::InstanceDef::InjectedCode(..) => { + M::call_intrinsic(self, instance, args, ret, unwind) + } ty::InstanceDef::VtableShim(..) | ty::InstanceDef::ReifyShim(..) | ty::InstanceDef::ClosureOnceShim { .. } diff --git a/src/librustc_mir/monomorphize/collector.rs b/src/librustc_mir/monomorphize/collector.rs index 994d1e69f2e3e..24c4226bb4e94 100644 --- a/src/librustc_mir/monomorphize/collector.rs +++ b/src/librustc_mir/monomorphize/collector.rs @@ -714,7 +714,9 @@ fn visit_instance_use<'tcx>( } match instance.def { - ty::InstanceDef::Virtual(..) | ty::InstanceDef::Intrinsic(_) => { + ty::InstanceDef::Virtual(..) + | ty::InstanceDef::Intrinsic(_) + | ty::InstanceDef::InjectedCode(_) => { if !is_direct_call { bug!("{:?} being reified", instance); } @@ -751,6 +753,7 @@ fn should_monomorphize_locally<'tcx>(tcx: TyCtxt<'tcx>, instance: &Instance<'tcx | ty::InstanceDef::FnPtrShim(..) | ty::InstanceDef::DropGlue(..) | ty::InstanceDef::Intrinsic(_) + | ty::InstanceDef::InjectedCode(_) | ty::InstanceDef::CloneShim(..) 
=> return true, }; diff --git a/src/librustc_mir/monomorphize/partitioning.rs b/src/librustc_mir/monomorphize/partitioning.rs index db1ea72c0a531..7c97b9d611e15 100644 --- a/src/librustc_mir/monomorphize/partitioning.rs +++ b/src/librustc_mir/monomorphize/partitioning.rs @@ -322,6 +322,7 @@ fn mono_item_visibility( | InstanceDef::FnPtrShim(..) | InstanceDef::Virtual(..) | InstanceDef::Intrinsic(..) + | InstanceDef::InjectedCode(..) | InstanceDef::ClosureOnceShim { .. } | InstanceDef::DropGlue(..) | InstanceDef::CloneShim(..) => return Visibility::Hidden, @@ -717,6 +718,7 @@ fn characteristic_def_id_of_mono_item<'tcx>( | ty::InstanceDef::FnPtrShim(..) | ty::InstanceDef::ClosureOnceShim { .. } | ty::InstanceDef::Intrinsic(..) + | ty::InstanceDef::InjectedCode(..) | ty::InstanceDef::DropGlue(..) | ty::InstanceDef::Virtual(..) | ty::InstanceDef::CloneShim(..) => return None, diff --git a/src/librustc_mir/shim.rs b/src/librustc_mir/shim.rs index f95fd9b9e90c5..b4477d9c86d43 100644 --- a/src/librustc_mir/shim.rs +++ b/src/librustc_mir/shim.rs @@ -109,6 +109,9 @@ fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceDef<'tcx>) -> Body<' ty::InstanceDef::Intrinsic(_) => { bug!("creating shims from intrinsics ({:?}) is unsupported", instance) } + ty::InstanceDef::InjectedCode(_) => { + bug!("creating shims from injected code ({:?}) is unsupported", instance) + } }; debug!("make_shim({:?}) = untransformed {:?}", instance, result); diff --git a/src/librustc_mir/transform/instrument_coverage.rs b/src/librustc_mir/transform/instrument_coverage.rs new file mode 100644 index 0000000000000..045cd03d1f7da --- /dev/null +++ b/src/librustc_mir/transform/instrument_coverage.rs @@ -0,0 +1,100 @@ +use crate::transform::{MirPass, MirSource}; +use rustc_index::vec::Idx; +use rustc_middle::mir::interpret::Scalar; +use rustc_middle::mir::*; +use rustc_middle::mir::{Local, LocalDecl}; +use rustc_middle::ty; +use rustc_middle::ty::Ty; +use rustc_middle::ty::TyCtxt; +use rustc_span::def_id::DefId; +use rustc_span::Span; + +pub struct InstrumentCoverage; + +/** + * Inserts call to count_code_region() as a placeholder to be replaced during code generation with + * the intrinsic llvm.instrprof.increment. + */ + +// FIXME(richkadel): As a first step, counters are only injected at the top of each function. +// The complete solution will inject counters at each conditional code branch. 
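To make the comments above concrete: conceptually (a sketch only; the real injection happens on MIR before codegen, and the function names below are hypothetical), building with `-Zinstrument_coverage` treats a function like the first one below as if it were written like the second, and the counter call is later lowered to `llvm.instrprof.increment`:

    fn add_one(x: u32) -> u32 {
        x + 1
    }

    // ...is compiled roughly as if it began with an entry counter:
    fn add_one_instrumented(x: u32) -> u32 {
        core::intrinsics::count_code_region(0); // counter for the function-entry region
        x + 1
    }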
+ +impl<'tcx> MirPass<'tcx> for InstrumentCoverage { + fn run_pass(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, body: &mut Body<'tcx>) { + if tcx.sess.opts.debugging_opts.instrument_coverage { + if let Some(callee_fn_def_id) = tcx.lang_items().count_code_region_fn() { + debug!("instrumenting {:?}", src.def_id()); + instrument_coverage(tcx, callee_fn_def_id, body); + } + } + } +} + +pub fn instrument_coverage<'tcx>( + tcx: TyCtxt<'tcx>, + callee_fn_def_id: DefId, + body: &mut Body<'tcx>, +) { + let span = body.span.shrink_to_lo(); + + let ret_ty = tcx.fn_sig(callee_fn_def_id).output(); + let ret_ty = ret_ty.no_bound_vars().unwrap(); + let substs = tcx.mk_substs(::std::iter::once(ty::subst::GenericArg::from(ret_ty))); + + let count_code_region_fn: Operand<'_> = + Operand::function_handle(tcx, callee_fn_def_id, substs, span); + + let index = const_int_operand(tcx, span.clone(), tcx.types.u32, 0); + + let args = vec![index]; + + let source_info = SourceInfo { span: span, scope: OUTERMOST_SOURCE_SCOPE }; + + let new_block = START_BLOCK + body.basic_blocks().len(); + + let next_local = body.local_decls.len(); + let new_temp = Local::new(next_local); + let unit_temp = Place::from(new_temp); + + let storage_live = Statement { source_info, kind: StatementKind::StorageLive(new_temp) }; + let storage_dead = Statement { source_info, kind: StatementKind::StorageDead(new_temp) }; + + let count_code_region_call = TerminatorKind::Call { + func: count_code_region_fn, + args, + destination: Some((unit_temp, new_block)), + cleanup: None, + from_hir_call: false, + }; + + body.local_decls.push(LocalDecl::new(tcx.mk_unit(), body.span)); + body.basic_blocks_mut().push(BasicBlockData { + statements: vec![storage_live], + is_cleanup: false, + terminator: Some(Terminator { source_info, kind: count_code_region_call }), + }); + + body.basic_blocks_mut().swap(START_BLOCK, new_block); + body[new_block].statements.push(storage_dead); + + // FIXME(richkadel): ALSO add each computed Span for each conditional branch to the coverage map + // and provide that map to LLVM to encode in the final binary. +} + +fn const_int_operand<'tcx>( + tcx: TyCtxt<'tcx>, + span: Span, + ty: Ty<'tcx>, + val: u128, +) -> Operand<'tcx> { + let param_env_and_ty = ty::ParamEnv::empty().and(ty); + let size = tcx + .layout_of(param_env_and_ty) + .unwrap_or_else(|e| panic!("could not compute layout for {:?}: {:?}", ty, e)) + .size; + Operand::Constant(box Constant { + span, + user_ty: None, + literal: ty::Const::from_scalar(tcx, Scalar::from_uint(val, size), ty), + }) +} diff --git a/src/librustc_mir/transform/mod.rs b/src/librustc_mir/transform/mod.rs index 4240b528a6124..e03ef48f74838 100644 --- a/src/librustc_mir/transform/mod.rs +++ b/src/librustc_mir/transform/mod.rs @@ -28,6 +28,7 @@ pub mod elaborate_drops; pub mod generator; pub mod inline; pub mod instcombine; +pub mod instrument_coverage; pub mod no_landing_pads; pub mod nrvo; pub mod promote_consts; @@ -287,6 +288,8 @@ fn mir_validated( &[&[ // What we need to run borrowck etc. &promote_pass, + // FIXME(richkadel): is this the best place for the InstrumentCoverage pass? + &instrument_coverage::InstrumentCoverage, &simplify::SimplifyCfg::new("qualify-consts"), ]], ); diff --git a/src/librustc_session/options.rs b/src/librustc_session/options.rs index d22c6ec9d7d01..599ce595e1314 100644 --- a/src/librustc_session/options.rs +++ b/src/librustc_session/options.rs @@ -876,6 +876,9 @@ options! 
{DebuggingOptions, DebuggingSetter, basic_debugging_options, "fix undefined behavior when a thread doesn't eventually make progress \ (such as entering an empty infinite loop) by inserting llvm.sideeffect \ (default: no)"), + instrument_coverage: bool = (false, parse_bool, [TRACKED], + "instrument the generated code with LLVM code region counters for \ + generating coverage reports (default: no)"), instrument_mcount: bool = (false, parse_bool, [TRACKED], "insert function instrument code for mcount-based tracing (default: no)"), keep_hygiene_data: bool = (false, parse_bool, [UNTRACKED], diff --git a/src/librustc_span/symbol.rs b/src/librustc_span/symbol.rs index fdeb58b7b7a31..623c279734733 100644 --- a/src/librustc_span/symbol.rs +++ b/src/librustc_span/symbol.rs @@ -240,6 +240,7 @@ symbols! { copy_closures, core, core_intrinsics, + count_code_region, crate_id, crate_in_paths, crate_local, diff --git a/src/librustc_ty/instance.rs b/src/librustc_ty/instance.rs index 0acf769168137..d4ceeff324450 100644 --- a/src/librustc_ty/instance.rs +++ b/src/librustc_ty/instance.rs @@ -35,6 +35,10 @@ fn resolve_instance<'tcx>( debug!(" => intrinsic"); ty::InstanceDef::Intrinsic(def_id) } + ty::FnDef(def_id, _) if Some(def_id) == tcx.lang_items().count_code_region_fn() => { + debug!(" => injected placeholder function to be replaced"); + ty::InstanceDef::InjectedCode(def_id) + } ty::FnDef(def_id, substs) if Some(def_id) == tcx.lang_items().drop_in_place_fn() => { let ty = substs.type_at(0); diff --git a/src/rustllvm/RustWrapper.cpp b/src/rustllvm/RustWrapper.cpp index 4704622922af0..cdb3a157eab97 100644 --- a/src/rustllvm/RustWrapper.cpp +++ b/src/rustllvm/RustWrapper.cpp @@ -5,6 +5,7 @@ #include "llvm/IR/DiagnosticPrinter.h" #include "llvm/IR/GlobalVariable.h" #include "llvm/IR/Instructions.h" +#include "llvm/IR/Intrinsics.h" #include "llvm/Object/Archive.h" #include "llvm/Object/ObjectFile.h" #include "llvm/Bitcode/BitcodeWriterPass.h" @@ -1364,6 +1365,11 @@ extern "C" LLVMValueRef LLVMRustBuildCall(LLVMBuilderRef B, LLVMValueRef Fn, unwrap(Fn), makeArrayRef(unwrap(Args), NumArgs), Bundles)); } +extern "C" LLVMValueRef LLVMRustGetInstrprofIncrementIntrinsic(LLVMModuleRef M) { + return wrap(llvm::Intrinsic::getDeclaration(unwrap(M), + (llvm::Intrinsic::ID)llvm::Intrinsic::instrprof_increment)); +} + extern "C" LLVMValueRef LLVMRustBuildMemCpy(LLVMBuilderRef B, LLVMValueRef Dst, unsigned DstAlign, LLVMValueRef Src, unsigned SrcAlign, diff --git a/src/test/codegen/coverage-experiments/Cargo.lock b/src/test/codegen/coverage-experiments/Cargo.lock new file mode 100644 index 0000000000000..132469cbb182c --- /dev/null +++ b/src/test/codegen/coverage-experiments/Cargo.lock @@ -0,0 +1,5 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+[[package]] +name = "coverage_experiments" +version = "0.1.0" diff --git a/src/test/codegen/coverage-experiments/Cargo.toml b/src/test/codegen/coverage-experiments/Cargo.toml new file mode 100644 index 0000000000000..296a8d5c9af2d --- /dev/null +++ b/src/test/codegen/coverage-experiments/Cargo.toml @@ -0,0 +1,103 @@ +[workspace] + +[package] +name = "coverage_experiments" +version = "0.1.0" +license = "BSD-3-Clause" +authors = ["rust-fuchsia@fuchsia.com"] +edition = "2018" + +[[bin]] + +name = "coverage_injection_test" +path = "src/coverage_injection_test.rs" + +[[bin]] + +name = "coverage_injection_test2" +path = "src/coverage_injection_test2.rs" + +[[bin]] + +name = "while" +path = "src/while.rs" + +[[bin]] + +name = "while_clean" +path = "src/while_clean.rs" + +[[bin]] + +name = "while_early_return" +path = "src/while_early_return.rs" + +[[bin]] + +name = "if_with_comments" +path = "src/if_with_comments.rs" + +[[bin]] + +name = "if" +path = "src/if.rs" + +[[bin]] + +name = "increment_intrinsic" +path = "src/increment_intrinsic.rs" + +[[bin]] + +name = "just_main" +path = "src/just_main.rs" + +[[bin]] + +name = "lazy_boolean" +path = "src/lazy_boolean.rs" + +[[bin]] + +name = "match" +path = "src/match.rs" + +[[bin]] + +name = "match_without_increment" +path = "src/match_without_increment.rs" # identical to -Zunpretty=hir output + +[[bin]] + +name = "match_with_increment" +path = "src/match_with_increment.rs" + +[[bin]] + +name = "match_with_increment_alt" +path = "src/match_with_increment_alt.rs" + +[[bin]] + +name = "loop_break_value" +path = "src/loop_break_value.rs" + +[[bin]] + +name = "for_with_comments" +path = "src/for_with_comments.rs" + +[[bin]] + +name = "for" +path = "src/for.rs" + +[[bin]] + +name = "drop_trait" +path = "src/drop_trait.rs" + +#[dependencies] # Should not need to manually add coverage dependencies +#version = "0.1.0" +#path = "../__builtin" # for mod __builtin::coverage + diff --git a/src/test/codegen/coverage-experiments/README-THIS-IS-TEMPORARY.md b/src/test/codegen/coverage-experiments/README-THIS-IS-TEMPORARY.md new file mode 100644 index 0000000000000..3b69c0a406594 --- /dev/null +++ b/src/test/codegen/coverage-experiments/README-THIS-IS-TEMPORARY.md @@ -0,0 +1,157 @@ +# codegen/coverage-experiments +*

THIS DIRECTORY IS TEMPORARY

*
+
+This directory contains some work-in-progress (WIP) code used for experimental development and
+testing of the Rust Coverage feature.
+
+The code in this directory will be removed, or migrated into product tests, when the Rust
+Coverage feature is complete.
+
+[TOC]
+
+## Development Notes
+
+### config.toml
+
+config.toml probably requires the following (I should verify that `llvm.instrprof.increment`
+intrinsic code generation ONLY works with this config option):
+
+    profiler = true
+
+## First build
+
+```shell
+./x.py clean
+./x.py build -i --stage 1 src/libstd
+```
+
+## Incremental builds *IF POSSIBLE!*
+
+```shell
+./x.py build -i --stage 1 src/libstd --keep-stage 1
+```
+
+*Note: Some changes made for Rust Coverage required the full build (without `--keep-stage 1`), and in some cases required `./x.py clean` first. Occasionally I would get errors when building, or when compiling a test program with `-Zinstrument-coverage`, that went away only after a full clean and build.*
+
+## Compile a test program with LLVM coverage instrumentation
+
+*Note: This PR is still a work in progress. At the time of this writing, the `llvm.instrprof.increment` intrinsic is injected and recognized by the LLVM code generation stage, but it does not appear to be included in the final binary. This is not surprising, since other steps are still to be implemented, such as generating the coverage map. See the suggested additional `llvm` flags below for ways to verify that the `llvm` passes at least receive the right intrinsic.*
+
+Suggested debug configuration to confirm Rust coverage features:
+```shell
+$ export RUSTC_LOG=rustc_codegen_llvm::intrinsic,rustc_mir::transform::instrument_coverage=debug
+```
+
+Ensure the newly compiled `rustc` is used (the path below, relative to the `rust` code repository root, is an example only):
+
+```shell
+$ build/x86_64-unknown-linux-gnu/stage1/bin/rustc \
+    src/test/codegen/coverage-experiments/just_main.rs \
+    -Zinstrument-coverage
+```
+
+### About the test programs in coverage-experiments/src/
+
+The `coverage-experiments/src/` directory contains some sample (and very simple) Rust programs used to analyze Rust compiler output at various stages, with or without the Rust code coverage compiler option. For now, these are only used for in-progress development and will be removed at a future date. (These are *not* formal test programs.)
+
+The `src` directory may also contain some snapshots of MIR output from experimentation, particularly if the saved snapshots highlight results that are important to future development, either individually or when compared with other output files.
+
+Be aware that some of the files and/or comments may be outdated.
+
+### Additional `llvm` flags (append to the `rustc` command)
+
+These optional flags generate additional files and/or terminal output. LLVM's `-print-before-all` should show the `instrprof.increment` intrinsic with arguments computed by the experimental Rust coverage feature code:
+
+```shell
+    --emit llvm-ir \
+    -Zverify-llvm-ir \
+    -Zprint-llvm-passes \
+    -Csave-temps \
+    -Cllvm-args=-print-before-all
+```
+
+### Additional flags for MIR analysis and transforms
+
+These optional flags generate a directory with many files representing the MIR as text (`.mir` files) and as a visual graph (`.dot` files) rendered by `graphviz`. The flags themselves are listed just after the short sketch below.
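+
+For reference, the programs these dumps are typically generated for (for example `just_main.rs`,
+used in the compile and run examples in this README) are trivial. The following is only a sketch
+of that shape; the actual file in `src/` may differ slightly:
+
+```rust
+// Minimal coverage test program of the kind used in this directory (sketch only;
+// see src/just_main.rs for the real file). The printed string matches the sample
+// output shown in the "Run the test program" section below.
+fn main() {
+    println!("hello world! (should be covered)");
+}
+```
+
+In the dumped MIR for `main`, look for one extra basic block whose `Call` terminator targets the
+`count_code_region` placeholder; that block is the counter injected by the new
+`InstrumentCoverage` pass.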
(**Some IDEs, such as `VSCode` have `graphviz` extensions.**) + +```shell + -Zdump-mir=main \ + -Zdump-mir-graphviz +``` + +### Flags I've used but appear to be irrelvant to `-Zinstrument-coverage` after all: +```shell + # -Zprofile + # -Ccodegen-units=1 + # -Cinline-threshold=0 + # -Clink-dead-code + # -Coverflow-checks=off +``` + +## Run the test program compiled with code coverage instrumentation (maybe): + +As stated above, at the time of this writing, this work-in-progress seems to generate `llvm.instrprof.increment` intrinsic calls correctly, and are visibile in early `llvm` code generation passes, but are eventually stripped. + +The test program should run as expected, currently does not generate any coverage output. + +*Example:* + +```shell + $ src/test/codegen/coverage-experiments/just_main + hello world! (should be covered) +``` + +### Running the coverage-enabled `rustc` compiler in the `lldb` debugger: + +For example, to verify the intrinsic is codegen'ed, set breakpoint in `lldb` where it validates a certain instruction is the `llvm.instrprof.increment` instruction. + +First, update config.toml for debugging: + +```toml + [llvm] + optimize = false + release-debuginfo = true + + [rust] + debug = true + debuginfo-level = 2 +``` + +*(Note, in case this is relevant after all, I also have the following changes; but I don't think I need them:)* + +```toml + # Add and uncomment these if relevant/useful: + # codegen-units = 0 + # python = '/usr/bin/python3.6' +``` + +Run the compiler with additional flags as needed: + +```shell +lldb \ + build/x86_64-unknown-linux-gnu/stage1/bin/rustc \ + -- \ + src/test/codegen/coverage-experiments/just_main.rs \ + -Zinstrument-coverage \ + -Zdump-mir=main \ + -Zdump-mir-graphviz +``` + +Note the specific line numbers may be different: + +```c++ +(lldb) b lib/Transforms/Instrumentation/InstrProfiling.cpp:418 +(lldb) r + +Process 93855 stopped +* thread #6, name = 'rustc', stop reason = breakpoint 2.1 + frame #0: 0x00007fffedff7738 librustc_driver-5a0990d8d18fb2b4.so`llvm::InstrProfiling::lowerIntrinsics(this=0x00007fffcc001d40, F=0x00007fffe4552198) at InstrProfiling.cpp:418:23 + 415 auto Instr = I++; + 416 InstrProfIncrementInst *Inc = castToIncrementInst(&*Instr); + 417 if (Inc) { +-> 418 lowerIncrement(Inc); + 419 MadeChange = true; + 420 } else if (auto *Ind = dyn_cast(Instr)) { + 421 lowerValueProfileInst(Ind); +(lldb) +``` \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/coverage_injection_test.rs b/src/test/codegen/coverage-experiments/src/coverage_injection_test.rs new file mode 100644 index 0000000000000..231da1dc1a67f --- /dev/null +++ b/src/test/codegen/coverage-experiments/src/coverage_injection_test.rs @@ -0,0 +1,335 @@ +/* */ use std::io::Error; +/* */ use std::io::ErrorKind; +/* */ +/* */ /// Align Rust counter increment with with: +/* */ /// [‘llvm.instrprof.increment’ Intrinsic](https://llvm.org/docs/LangRef.html#llvm-instrprof-increment-intrinsic) +/* */ /// +/* */ /// declare void @llvm.instrprof.increment(i8* , i64 , i32 , i32 ) +/* */ /// +/* */ /// The first argument is a pointer to a global variable containing the name of the entity +/* */ /// being instrumented. This should generally be the (mangled) function name for a set of +/* */ /// counters. +/* */ /// +/* */ /// The second argument is a hash value that can be used by the consumer of the profile data +/* */ /// to detect changes to the instrumented source, and the third is the number of counters +/* */ /// associated with name. 
It is an error if hash or num-counters differ between two +/* */ /// instances of instrprof.increment that refer to the same name. +/* */ /// +/* */ /// The last argument refers to which of the counters for name should be incremented. It +/* */ /// should be a value between 0 and num-counters. +/* */ /// +/* */ /// # Arguments +/* */ /// +/* */ /// `mangled_fn_name` - &'static ref to computed and injected static str, using: +/* */ /// +/* */ /// ``` +/* */ /// fn rustc_symbol_mangling::compute_symbol_name( +/* */ /// tcx: TyCtxt<'tcx>, +/* */ /// instance: Instance<'tcx>, +/* */ /// compute_instantiating_crate: impl FnOnce() -> CrateNum, +/* */ /// ) -> String +/* */ /// ``` +/* */ /// +/* */ /// `source_version_hash` - Compute hash based that only changes if there are "significant" +/* */ /// to control-flow inside the function. +/* */ /// +/* */ /// `num_counters` - The total number of counter calls [MAX(counter_index) + 1] within the +/* */ /// function. +/* */ /// +/* */ /// `counter_index` - zero-based counter index scoped by the function. (Ordering of +/* */ /// counters, relative to the source code location, is apparently not expected.) +/* */ /// +/* */ /// # Notes +/* */ /// +/* */ /// * The mangled_fn_name may not be computable until generics are monomorphized (see +/* */ /// parameters required by rustc_symbol_mangling::compute_symbol_name). +/* */ /// * The version hash may be computable from AST analysis, and may not benefit from further +/* */ /// lowering. +/* */ /// * num_counters depends on having already identified all counter insertion locations. +/* */ /// * counter_index can be computed at time of counter insertion (incrementally). +/* */ /// * Numeric parameters are signed to match the llvm increment intrinsic parameter types. +/* */ fn __lower_incr_cov(_mangled_fn_name: &'static str, _fn_version_hash: i64, _num_counters: i32, _counter_index: i32) { +/* */ } +/* */ +/* */ /// A coverage counter implementation that will work as both an intermediate coverage +/* */ /// counting and reporting implementation at the AST-level only--for debugging and +/* */ /// development--but also serves as a "marker" to be replaced by calls to LLVM +/* */ /// intrinsic coverage counter APIs during the lowering process. +/* */ /// +/* */ /// Calls to this function will be injected automatically into the AST. When LLVM intrinsics +/* */ /// are enabled, the counter function calls that were injected into the AST serve as +/* */ /// placeholders, to be replaced by an alternative, such as: +/* */ /// +/* */ /// * direct invocation of the `llvm.instrprof.increment()` intrinsic; or +/* */ /// * the `__lower_incr_cov()` function, defined above, that would invoke the +/* */ /// `llvm.instrprof.increment()` intrinsic; or +/* */ /// * a similar expression wrapper, with the additional parameters (as defined above +/* */ /// for `__lower_incr_cov()`, that invokes `llvm.instrprof.increment()` and returns the +/* */ /// result of the wrapped expression) +/* */ /// +/* */ /// The first two options would require replacing the inlined wrapper call with something +/* */ /// like: +/* */ /// +/* */ /// ``` +/* */ /// { let result = {expr}; __inlined_incr_cov(context, counter); result } +/* */ /// ``` +/* */ /// +/* */ /// But if the lowering process is already unwrapping the inlined call to `__incr_cov()`, then +/* */ /// it may be a perfect opportunity to replace the function with one of these more +/* */ /// direct methods. 
+/* */ /// +/* */ #[inline(always)] +/* */ pub fn __incr_cov(region_loc: &str, /*index: u32,*/ result: T) -> T { +/* */ // Either call the intermediate non-llvm coverage counter API or +/* */ // replace the call to this function with the expanded `__lower_incr_cov()` call. +/* */ +/* */ // let _lock = increment_counter(counter); +/* */ println!("{}", region_loc); +/* */ +/* */ result +/* */ } +/* */ +/* */ /// Write a report identifying each incremented counter and the number of times each counter +/* */ /// was incremented. +/* */ fn __report() { +/* */ println!("WRITE REPORT!"); +/* */ } +/* */ +/* */ /// Increment the counter after evaluating the wrapped expression (see `__incr_cov()`), then +/* */ /// write a report identifying each incremented counter and the number of times each counter +/* */ /// was incremented. +/* */ #[inline(always)] +/* */ pub fn __incr_cov_and_report(region_loc: &str, /*counter: u32,*/ result: T) -> T { +/* */ __incr_cov(region_loc, /*counter,*/ ()); +/* */ __report(); +/* */ result +/* */ } +/* */ +/* */ macro_rules! from { +/* */ ($from:expr) => { &format!("from: {}\n to: {}:{}:{}", $from, file!(), line!(), column!()) }; +/* */ } +/* */ +/* */ #[derive(Debug)] +/* */ enum TestEnum { +/* */ Red, +/* */ Green, +/* */ Blue, +/* */ } +/* */ +/* */ struct TestStruct { +/* */ field: i32, +/* */ } +/* */ +/* */ // IMPORTANT! IS WRAPPING main() ENOUGH? OR DO I ALSO NEED TO WRAP THREAD FUNCTIONS, ASSUMING +/* */ // THEY ARE STILL RUNNING WITH MAIN EXITS? (IF THEY CAN). NOT SURE HOW RUST HANDLES THAT. +/* */ +/* */ // I SUSPECT USING THREAD_LOCAL COUNTERS MAY NOT ACTUALLY BE AN OPTIMIZATION OVER MUTEX LOCKS, +/* */ // BUT MAYBE I SHOULD ASK. +/* */ +/* */ impl TestStruct { +/* - */ fn new() -> Self { +/* ┃ */ __incr_cov(from!("fn new()"),Self::new_with_value(31415)) // function-scoped counter index = 0 +/* - */ } +/* */ +/* - */ fn new_with_value(field: i32) -> Self { +/* ┃ */ __incr_cov(from!("fn new_with_value()"),Self { +/* ┃ */ field, +/* ┃ */ }) // function-scoped counter index = 0 +/* - */ } +/* */ +/* */ fn call_closure(&self, closure: F) -> bool +/* */ where +/* */ F: FnOnce( +/* */ i32, +/* */ ) -> bool, +/* - */ { +/* ┃ */ __incr_cov(from!("fn call_closure()"),closure(123)) // function-scoped counter index = 0 +/* - */ } +/* */ +/* - */ fn various(&self) -> Result<(),Error> { +/* ┃ */ use TestEnum::*; +/* ┃ */ let mut color = Red; +/* ┃ */ let _ = color; +/* ┃ */ color = Blue; +/* ┃ */ let _ = color; +/* ┃ */ color = Green; +/* ┃ */ match __incr_cov(from!("fn various"),color) { // function-scoped counter index = 0 +/* : */ +/* : */ // !!! RECORD SPAN FROM START OF INNERMOST CONTAINING BLOCK (THE FUNCTION IN THIS CASE) TO END OF MATCH EXPRESSION +/* : */ // If `match`, `while`, `loop`, `for`, `if`, etc. expression has a `return`, `break`, or `continue` +/* : */ // (if legal), then RECORD SPAN FROM START OF INNERMOST CONTAINING BLOCK TO END OF `return` EXPRESSION +/* : */ // If the expression includes lazy booleans, nest calls to `__incr_cov()`. +/* : I */ Red => __incr_cov(from!("Red => or end of MatchArmGuard expression inside pattern, if any"),println!("roses")), +/* : - */ Green => { +/* : ┃ */ let spidey = 100; +/* : ┃ */ let goblin = 50; +/* : ┃ */ // if spidey > goblin {__incr_cov(from!(""),{ +/* : ┃ */ // println!("what ev"); +/* : ┃ */ // })} +/* : ┃ */ // ACTUALLY, WRAPPING THE ENTIRE IF BLOCK IN `__incr_cov` IS NOT A GREAT GENERAL RULE. 
+/* : ┃ */ // JUST INSERTING A `return`, `break`, or `continue` IN THAT BLOCK (without an intermediate condition) +/* : ┃ */ // MAKES THE `__incr_cov()` CALL UNREACHABLE! +/* : ┃ */ // MY ORIGINAL SOLUTION WORKS BETTER (WRAP LAST EXPRESSION OR AFTER LAST SEMICOLON STATEMENT IN BLOCK) +/* : ┃ */ // UNLESS THE EXPRESSION IS NOT A BLOCK. +/* : ┃ - */ if __incr_cov(from!("Green => or end of MatchArmGuard expression inside pattern, if any"),spidey > goblin) { +/* : : ┃ */ println!("spidey beats goblin"); +/* : : ┃ */ __incr_cov(from!("block start"),()); +/* : ┃ - */ } else if __incr_cov(from!("`else if` on this line"),spidey == goblin) { +/* : : ┃ */ // COVERAGE NOTE: Do we mark only the expression span (that may be trivial, as in this case), +/* : : ┃ */ // or associate it with the outer block, similar to how the `if` expression is associated with +/* : : ┃ */ // the outer block? (Although it is a continuation, in a sense, it is discontiguous in this case, +/* : : ┃ */ // so I think simpler to just make it its own coverage region.) +/* : : ┃ */ println!("it's a draw"); +/* : : ┃ */ __incr_cov(from!("block start"),()); +/* : ┃ - - - */ } else if if __incr_cov(from!("`else if` on this line"),true) { +/* : : : ┃ */ // return __incr_cov(from!("after `if true`"),Ok(())); +/* : : : ┃ */ // ACTUALLY, BECAUSE OF `return`, WE DO NOT RECORD THE `if true` EVEN THOUGH WE COVERED IT. +/* : : : ┃ */ // IN FACT, IF THIS NESTED CONDITIONAL IN A CONDITIONAL EXPRESSION WAS AN `if` (WITHOUT PRECEDING ELSE) +/* : : : ┃ */ // WE WOULD NOT HAVE RECORDED THE COVERAGE OF STATEMENTS LEADING UP TO THE `if`, SO +/* : : : ┃ */ // IT SHOULD BE: +/* ┏-:---:-------:---< */ return __incr_cov(from!(""),Ok(())); +/* V : : : : */ // NOTE THE `from` STRING IS SAME FOR THE `else if`s `__incr_cov` AND THIS `return`. +/* : : : : */ // ONLY ONE OF THESE WILL EXECUTE, TO RECORD COVERAGE FROM THAT SPOT. +/* : : ┃ - */ } else { +/* : : : I */ __incr_cov(from!("`else`"),false) +/* : : - - */ } { +/* : : ┃ */ println!("wierd science"); +/* : : ┃ */ __incr_cov(from!("block start"),()); +/* : ┃ - */ } else { +/* : : ┃ */ println!("goblin wins"); +/* ┏-:---:---< */ return __incr_cov(from!("`else`"),Ok(())); // THIS COUNTS LAST STATEMENT IN `else` BLOCK +/* V : : : */ // COVERAGE NOTE: When counting the span for `return`, +/* : : : */ // `break`, or `continue`, also report the outer spans +/* : : : */ // got this far--including this `else` block. Record +/* : : : */ // The start positions for those outer blocks, but: +/* : : : */ // * For the block containing the `return`, `break`, or +/* : : : */ // `continue`, end report the end position is the +/* : : : */ // start of the `return` span (or 1 char before it). +/* : : : */ // * Anything else? 
+/* : ┃ - */ } +/* : ┃ - */ // __incr_cov(from!(""),()); // DO NOT COUNT HERE IF NO STATEMENTS AFTER LAST `if` or `match` +/* : - */ }, +/* : I */ Blue => __incr_cov(from!("Blue => or end of MatchArmGuard expression inside pattern, if any"),println!("violets")), +/* ┃ */ } +/* ┃ */ +/* ┃ */ let condition1 = true; +/* ┃ */ let condition2 = false; +/* ┃ */ let condition3 = true; +/* ┃ */ +/* ┃ */ println!("Called `various()` for TestStruct with field={}", self.field); +/* ┃ */ +/* ┃ - */ if __incr_cov(from!("after block end of prior `match` (or `if-else if-else`)"),condition1) { +/* : ┃ */ println!("before while loop"); +/* : ┃ */ let mut countdown = 10; +/* : ┃ */ __incr_cov(from!("block start"),()); // Must increment before repeated while text expression +/* : : I */ while __incr_cov(from!("while test"), countdown > 0) { // span is just the while test expression +/* : : ┃ */ println!("top of `while` loop"); +/* : : ┃ */ countdown -= 1; +/* : : ┃ */ // __incr_cov(from!("while loop"),()); // Counter not needed, but span is computed as "while test" minus "block start" +/* : : ┃ */ // If test expression is 11, and the outer block runs only once, 11-1 = 10 +/* : ┃ - */ } +/* : ┃ */ println!("before for loop"); +/* : ┃ - */ for index in __incr_cov(from!("end of while"),0..10) { +/* : : ┃ */ println!("top of `for` loop"); +/* : : ┃ - */ if __incr_cov(from!("block start"),index == 8) { +/* : : : ┃ */ println!("before break"); +/* : : : ┃ */ // note the following is not legal here: +/* : : : ┃ */ // "can only break with a value inside `loop` or breakable block" +/* : : : ┃ */ // break __incr_cov(from!(""),()); +/* : : : ┃ */ __incr_cov(from!("block start"),()); +/* : : ┏-----< */ break; +/* : : V : : */ +/* : : : : */ // FIXME(richkadel): add examples with loop labels, breaking out of inner and outer loop to outer loop label, with expression. +/* : : : : */ // May want to record both the span and the start position after the broken out block depdnding on label +/* : : ┃ - */ } +/* : : ┃ */ println!("after `break` test"); +/* : : ┃ - */ if __incr_cov(from!("block end of `if index == 8`"),condition2) { +/* ┏-:---:---:---< */ return __incr_cov(from!("block start"),Ok(())); +/* V : : ┃ - */ } +/* : : ┃ */ +/* : : ┃ */ // BECAUSE THE PREVIOUS COVERAGE REGION HAS A `return`, THEN +/* : : ┃ */ // IF PREVIOUS COVERAGE REGION IS NOT COUNTED THEN OUTER REGION REACHED HERE. +/* : : ┃ */ // ADD A COVERAGE REGION FOR THE SPAN FROM JUST AFTER PREVIOUS REGION TO END +/* : : ┃ */ // OF OUTER SPAN, THEN TRUNCATE TO NEXT REGION NOT REACHED. +/* : : ┃ - */ if index % 3 == 2 { // NO __incr_cov() HERE BECAUSE NO STATEMENTS BETWEEN LAST CONDITIONAL BLOCK AND START OF THIS ONE +/* : : Λ : ┃ */ __incr_cov(from!("block end of `if condition2`"),()); +/* : : ┗-----< */ continue; +/* : : ┃ - */ } +/* : : ┃ */ println!("after `continue` test"); +/* : : ┃ */ // maybe add a runtime flag for a possible `return` here? +/* : : ┃ */ __incr_cov(from!("for loop"),()); +/* : ┃ - */ } +/* : ┃ */ println!("after for loop"); +/* : ┃ */ let result = if { // START OF NEW CONDITIONAL EXPRESSION. NEXT "GUARANTEED" COUNTER SHOULD COUNT FROM END OF LAST CONDITIONAL EXPRESSION +/* : ┃ */ // A "GUARANTEED" COUNTER CALL IS ONE THAT WILL BE CALLED REGARDLESS OF OTHER CONDITIONS. 
THIS INCLUDES: +/* : ┃ */ // * A CONDITIONAL EXPRESSION THAT IS NOT A BLOCK (OR ANOTHER CONDITIONAL STATEMENT, WHICH WOULD CONTAIN A BLOCK) +/* : ┃ */ // * OR IF THE NEXT CONDITIONAL EXPRESSION IS A BLOCK OR CONDITIONAL STATEMENT, THEN THE FIRST "GUARANTEED" COUNTER IN THAT BLOCK +/* : ┃ */ // * END OF BLOCK IF THE BLOCK DOES NOT HAVE INNER CONDITIONAL EXPRESSIONS +/* : ┃ */ // * BRANCHING STATEMENTS (`return`, `break`, `continue`) BY EITHER WRAPPING THE BRANCH STATEMENT NON-BLOCK EXPRESSION, +/* : ┃ */ // OR PREPENDING A COUNTER WITH EMPTY TUPLE IF NO EXPRESSION, OR IF EXPRESSION IS A BLOCK, THEN THE NEXT "GUARANTEED" +/* : ┃ */ // COUNTER CALL WITHIN THAT BLOCK. +/* : ┃ */ // BASICALLY, CARRY THE START OF COVERAGE SPAN FORWARD UNTIL THE GUARANTEED COUNTER IS FOUND +/* : ┃ */ println!("after result = if ..."); +/* : ┃ - */ if __incr_cov(from!("block end of `for` loop"),condition2) { +/* : : ┃ */ println!("before first return"); +/* ┏-:---:-------< */ return __incr_cov(from!("block start"),Ok(())); +/* V : : - */ } else if __incr_cov(from!("`else`"),condition3) { +/* : : ┃ */ // THE ABOVE COUNTER IS _NOT_ REALLY NECESSARY IF EXPRESSION IS GUARANTEED TO EXECUTE. +/* : : ┃ */ // IF WE GET COUNTER IN `else if` BLOCK WE COVERED EXPRESSION. +/* : : ┃ */ // IF WE GET TO ANY REMAINING `else` or `else if` BLOCK WE KNOW WE EVALUATED THIS CONDITION +/* : : ┃ */ // AND ALL OTHERS UP TO THE EXECUTED BLOCK. BUT THE SPAN WOULD HAVE "HOLES" FOR UNEXECUTED BLOCKS. +/* : : ┃ */ println!("not second return"); +/* ┏-:---:-------< */ return __incr_cov(from!("block start"),Ok(())); +/* V : : - */ } else { +/* : : ┃ */ println!("not returning"); +/* : : ┃ */ __incr_cov(from!("block start"),false) +/* : : - */ } +/* : ┃ */ // NO COUNTER HERE BECAUSE NO STATEMENTS AFTER CONDITIONAL BLOCK +/* : ┃ - */ } { +/* : : ┃ */ println!("branched condition returned true"); +/* : : ┃ */ __incr_cov(from!(""),Ok(())) +/* : ┃ - */ } else if self.call_closure( +/* : : - */ |closure_param| __incr_cov(from!(""), +/* : : ┃ - */ if condition3 { +/* : : : ┃ */ println!("in closure, captured condition said to print the param {}", closure_param); +/* : : : ┃ */ __incr_cov(from!(""),false) +/* : : ┃ - */ } else { +/* : : : ┃ */ println!("in closure, captured condition was false"); +/* : : : ┃ */ __incr_cov(from!(""),true) +/* : : ┃ - */ } +/* : : - */ ) +/* : : - */ ) { +/* : : ┃ */ println!("closure returned true"); +/* : : ┃ */ __incr_cov(from!(""),Err(Error::new(ErrorKind::Other, "Result is error if closure returned true"))) +/* : ┃ - */ } else { +/* : : ┃ */ println!("closure returned false"); +/* : : ┃ */ __incr_cov(from!(""),Err(Error::new(ErrorKind::Other, "Result is error if closure returned false"))) +/* : ┃ - */ }; +/* : ┃ */ println!("bottom of function might be skipped if early `return`"); +/* : ┃ */ __incr_cov(from!("if condition1"),result) +/* ┃ - */ } else { +/* : ┃ */ println!("skipping everything in `various()`"); +/* : ┃ */ __incr_cov(from!(""),Ok(())) +/* ┃ - */ } +/* ┃ - */ // __incr_cov(from!(""),0) // DO NOT COUNT IF NO STATEMENTS AFTER CONDITIONAL BLOCK. 
ALL COVERAGE IS ALREADY COUNTED +/* - */ } +/* */ } +/* */ +/* - */ fn main() -> Result<(), std::io::Error> { +/* ┃ */ //let mut status: u8 = 2; +/* ┃ */ let mut status: u8 = 1; +/* : - */ let result = if status < 2 && +/* : ┃ */ __incr_cov(from!(""),{ +/* : ┃ */ status -= 1; +/* : ┃ */ status == 0 +/* : - - */ }) { +/* : ┃ */ let test_struct = TestStruct::new_with_value(100); +/* : ┃ */ let _ = test_struct.various(); +/* ┏-:---< */ return __incr_cov_and_report(from!(""),Err(Error::new(ErrorKind::Other, format!("Error status {}", status)))) +/* V : - */ } else { +/* : ┃ */ let test_struct = TestStruct::new(); +/* : ┃ */ __incr_cov(from!(""),test_struct.various()) +/* : - */ }; +/* ┃ */ println!("done"); +/* ┃ */ __incr_cov_and_report(from!(""),result) // function-scoped counter index = 0 +/* - */ } \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/coverage_injection_test2.rs b/src/test/codegen/coverage-experiments/src/coverage_injection_test2.rs new file mode 100644 index 0000000000000..8f4399ab51d09 --- /dev/null +++ b/src/test/codegen/coverage-experiments/src/coverage_injection_test2.rs @@ -0,0 +1,320 @@ +/* */ use std::io::Error; +/* */ use std::io::ErrorKind; +/* */ +/* */ /// Align Rust counter increment with with: +/* */ /// [‘llvm.instrprof.increment’ Intrinsic](https://llvm.org/docs/LangRef.html#llvm-instrprof-increment-intrinsic) +/* */ /// +/* */ /// declare void @llvm.instrprof.increment(i8* , i64 , i32 , i32 ) +/* */ /// +/* */ /// The first argument is a pointer to a global variable containing the name of the entity +/* */ /// being instrumented. This should generally be the (mangled) function name for a set of +/* */ /// counters. +/* */ /// +/* */ /// The second argument is a hash value that can be used by the consumer of the profile data +/* */ /// to detect changes to the instrumented source, and the third is the number of counters +/* */ /// associated with name. It is an error if hash or num-counters differ between two +/* */ /// instances of instrprof.increment that refer to the same name. +/* */ /// +/* */ /// The last argument refers to which of the counters for name should be incremented. It +/* */ /// should be a value between 0 and num-counters. +/* */ /// +/* */ /// # Arguments +/* */ /// +/* */ /// `mangled_fn_name` - &'static ref to computed and injected static str, using: +/* */ /// +/* */ /// ``` +/* */ /// fn rustc_symbol_mangling::compute_symbol_name( +/* */ /// tcx: TyCtxt<'tcx>, +/* */ /// instance: Instance<'tcx>, +/* */ /// compute_instantiating_crate: impl FnOnce() -> CrateNum, +/* */ /// ) -> String +/* */ /// ``` +/* */ /// +/* */ /// `source_version_hash` - Compute hash based that only changes if there are "significant" +/* */ /// to control-flow inside the function. +/* */ /// +/* */ /// `num_counters` - The total number of counter calls [MAX(counter_index) + 1] within the +/* */ /// function. +/* */ /// +/* */ /// `counter_index` - zero-based counter index scoped by the function. (Ordering of +/* */ /// counters, relative to the source code location, is apparently not expected.) +/* */ /// +/* */ /// # Notes +/* */ /// +/* */ /// * The mangled_fn_name may not be computable until generics are monomorphized (see +/* */ /// parameters required by rustc_symbol_mangling::compute_symbol_name). +/* */ /// * The version hash may be computable from AST analysis, and may not benefit from further +/* */ /// lowering. +/* */ /// * num_counters depends on having already identified all counter insertion locations. 
+/* */ /// * counter_index can be computed at time of counter insertion (incrementally). +/* */ /// * Numeric parameters are signed to match the llvm increment intrinsic parameter types. +/* */ fn __lower_incr_cov(_mangled_fn_name: &'static str, _fn_version_hash: i64, _num_counters: i32, _counter_index: i32) { +/* */ } +/* */ +/* */ /// A coverage counter implementation that will work as both an intermediate coverage +/* */ /// counting and reporting implementation at the AST-level only--for debugging and +/* */ /// development--but also serves as a "marker" to be replaced by calls to LLVM +/* */ /// intrinsic coverage counter APIs during the lowering process. +/* */ /// +/* */ /// Calls to this function will be injected automatically into the AST. When LLVM intrinsics +/* */ /// are enabled, the counter function calls that were injected into the AST serve as +/* */ /// placeholders, to be replaced by an alternative, such as: +/* */ /// +/* */ /// * direct invocation of the `llvm.instrprof.increment()` intrinsic; or +/* */ /// * the `__lower_incr_cov()` function, defined above, that would invoke the +/* */ /// `llvm.instrprof.increment()` intrinsic; or +/* */ /// * a similar expression wrapper, with the additional parameters (as defined above +/* */ /// for `__lower_incr_cov()`, that invokes `llvm.instrprof.increment()` and returns the +/* */ /// result of the wrapped expression) +/* */ /// +/* */ /// The first two options would require replacing the inlined wrapper call with something +/* */ /// like: +/* */ /// +/* */ /// ``` +/* */ /// { let result = {expr}; __inlined_incr_cov(context, counter); result } +/* */ /// ``` +/* */ /// +/* */ /// But if the lowering process is already unwrapping the inlined call to `__incr_cov()`, then +/* */ /// it may be a perfect opportunity to replace the function with one of these more +/* */ /// direct methods. +/* */ /// +/* */ #[inline(always)] +/* */ pub fn __incr_cov(region_loc: &str) { +/* */ // Either call the intermediate non-llvm coverage counter API or +/* */ // replace the call to this function with the expanded `__lower_incr_cov()` call. +/* */ +/* */ // let _lock = increment_counter(counter); +/* */ println!("{}", region_loc); +/* */ } +/* */ +/* */ /// Write a report identifying each incremented counter and the number of times each counter +/* */ /// was incremented. +/* */ fn __report() { +/* */ println!("WRITE REPORT!"); +/* */ } +/* */ +/* */ macro_rules! from { +/* */ ($from:expr) => { &format!("from: {}\n to: {}:{}:{}", $from, file!(), line!(), column!()) }; +/* */ } +/* */ +/* */ #[derive(Debug)] +/* */ enum TestEnum { +/* */ Red, +/* */ Green, +/* */ Blue, +/* */ } +/* */ +/* */ struct TestStruct { +/* */ field: i32, +/* */ } +/* */ +/* */ // IMPORTANT! IS WRAPPING main() ENOUGH? OR DO I ALSO NEED TO WRAP THREAD FUNCTIONS, ASSUMING +/* */ // THEY ARE STILL RUNNING WITH MAIN EXITS? (IF THEY CAN). NOT SURE HOW RUST HANDLES THAT. +/* */ +/* */ // I SUSPECT USING THREAD_LOCAL COUNTERS MAY NOT ACTUALLY BE AN OPTIMIZATION OVER MUTEX LOCKS, +/* */ // BUT MAYBE I SHOULD ASK. 
+/* */ +/* */ impl TestStruct { +/* - */ fn new() -> Self { +/* ┃ */ let __result = Self::new_with_value(31415); // function-scoped counter index = 0 +/* ┃ */ __incr_cov(from!("fn new()")); +/* ┃ */ __result +/* - */ } +/* */ +/* - */ fn new_with_value(field: i32) -> Self { +/* ┃ */ let __result = Self { +/* ┃ */ field, +/* ┃ */ }; +/* ┃ */ __incr_cov(from!("fn new_with_value()")); // function-scoped counter index = 0 +/* ┃ */ __result +/* - */ } +/* */ +/* */ fn call_closure(&self, closure: F) -> bool +/* */ where +/* */ F: FnOnce( +/* */ i32, +/* */ ) -> bool, +/* - */ { +/* ┃ */ let __result = closure(123); +/* ┃ */ __incr_cov(from!("fn call_closure()")); // function-scoped counter index = 0 +/* ┃ */ __result +/* - */ } +/* */ +/* - */ fn various(&self) -> Result<(),Error> { +/* ┃ */ use TestEnum::*; +/* ┃ */ let mut color = Red; +/* ┃ */ let _ = color; +/* ┃ */ color = Blue; +/* ┃ */ let _ = color; +/* ┃ */ color = Green; +/* ┃ */ match { let __result = color; __incr_cov(from!("fn various")); __result } { // function-scoped counter index = 0 +/* : */ +/* : */ // !!! RECORD SPAN FROM START OF INNERMOST CONTAINING BLOCK (THE FUNCTION IN THIS CASE) TO END OF MATCH EXPRESSION +/* : */ // If `match`, `while`, `loop`, `for`, `if`, etc. expression has a `return`, `break`, or `continue` +/* : */ // (if legal), then RECORD SPAN FROM START OF INNERMOST CONTAINING BLOCK TO END OF `return` EXPRESSION +/* : */ // If the expression includes lazy booleans, nest calls to `__incr_cov()`. +/* : I */ Red => {println!("roses"); __incr_cov(from!("Red => or end of MatchArmGuard expression inside pattern, if any"));} +/* : - */ Green => { +/* : ┃ */ let spidey = 100; +/* : ┃ */ let goblin = 50; +/* : ┃ */ // if spidey > goblin {__incr_cov(from!(""),{ +/* : ┃ */ // println!("what ev"); +/* : ┃ */ // })} +/* : ┃ */ // ACTUALLY, WRAPPING THE ENTIRE IF BLOCK IN `__incr_cov` IS NOT A GREAT GENERAL RULE. +/* : ┃ */ // JUST INSERTING A `return`, `break`, or `continue` IN THAT BLOCK (without an intermediate condition) +/* : ┃ */ // MAKES THE `__incr_cov()` CALL UNREACHABLE! +/* : ┃ */ // MY ORIGINAL SOLUTION WORKS BETTER (WRAP LAST EXPRESSION OR AFTER LAST SEMICOLON STATEMENT IN BLOCK) +/* : ┃ */ // UNLESS THE EXPRESSION IS NOT A BLOCK. +/* : ┃ - */ if { let __result = spidey > goblin; __incr_cov(from!("Green => or end of MatchArmGuard expression inside pattern, if any")); __result } { +/* : : ┃ */ println!("spidey beats goblin"); +/* : : ┃ */ __incr_cov(from!("block start")); +/* : ┃ - */ } else if { let __result = spidey == goblin; __incr_cov(from!("`else if` on this line")); __result } { +/* : : ┃ */ // COVERAGE NOTE: Do we mark only the expression span (that may be trivial, as in this case), +/* : : ┃ */ // or associate it with the outer block, similar to how the `if` expression is associated with +/* : : ┃ */ // the outer block? (Although it is a continuation, in a sense, it is discontiguous in this case, +/* : : ┃ */ // so I think simpler to just make it its own coverage region.) +/* : : ┃ */ println!("it's a draw"); +/* : : ┃ */ __incr_cov(from!("block start")); +/* : ┃ - - - */ } else if if { let __result = true; __incr_cov(from!("`else if` on this line")); __result } { +/* : : : ┃ */ // return __incr_cov(from!("after `if true`"),Ok(())); +/* : : : ┃ */ // ACTUALLY, BECAUSE OF `return`, WE DO NOT RECORD THE `if true` EVEN THOUGH WE COVERED IT. 
+/* : : : ┃ */ // IN FACT, IF THIS NESTED CONDITIONAL IN A CONDITIONAL EXPRESSION WAS AN `if` (WITHOUT PRECEDING ELSE) +/* : : : ┃ */ // WE WOULD NOT HAVE RECORDED THE COVERAGE OF STATEMENTS LEADING UP TO THE `if`, SO +/* : : : ┃ */ // IT SHOULD BE: +/* ┏-:---:-------:---< */ return { let __result = Ok(()); __incr_cov(from!("")); __result }; +/* V : : : : */ // NOTE THE `from` STRING IS SAME FOR THE `else if`s `__incr_cov` AND THIS `return`. +/* : : : : */ // ONLY ONE OF THESE WILL EXECUTE, TO RECORD COVERAGE FROM THAT SPOT. +/* : : ┃ - */ } else { +/* : : : I */ { let __result = false; __incr_cov(from!("`else`")); __result } +/* : : - - */ } { +/* : : ┃ */ println!("wierd science"); +/* : : ┃ */ __incr_cov(from!("block start")); +/* : ┃ - */ } else { +/* : : ┃ */ println!("goblin wins"); +/* ┏-:---:---< */ return { let __result = Ok(()); __incr_cov(from!("`else`")); __result }; // THIS COUNTS LAST STATEMENT IN `else` BLOCK +/* V : : : */ // COVERAGE NOTE: When counting the span for `return`, +/* : : : */ // `break`, or `continue`, also report the outer spans +/* : : : */ // got this far--including this `else` block. Record +/* : : : */ // The start positions for those outer blocks, but: +/* : : : */ // * For the block containing the `return`, `break`, or +/* : : : */ // `continue`, end report the end position is the +/* : : : */ // start of the `return` span (or 1 char before it). +/* : : : */ // * Anything else? +/* : ┃ - */ } +/* : ┃ - */ // __incr_cov(from!("")); // DO NOT COUNT HERE IF NO STATEMENTS AFTER LAST `if` or `match` +/* : - */ }, +/* : I */ Blue => { println!("violets"); __incr_cov(from!("Blue => or end of MatchArmGuard expression inside pattern, if any")); } +/* ┃ */ } +/* ┃ */ +/* ┃ */ let condition1 = true; +/* ┃ */ let condition2 = false; +/* ┃ */ let condition3 = true; +/* ┃ */ +/* ┃ */ println!("Called `various()` for TestStruct with field={}", self.field); +/* ┃ */ +/* ┃ - */ if { let __result = condition1; __incr_cov(from!("after block end of prior `match` (or `if-else if-else`)")); __result } { +/* : ┃ */ println!("before for loop"); +/* : ┃ - */ for index in { let __result = 0..10; __incr_cov(from!("block start")); __result } { +/* : : ┃ */ println!("top of `for` loop"); +/* : : ┃ - */ if { let __result = index == 8; __incr_cov(from!("block start")); __result } { +/* : : : ┃ */ println!("before break"); +/* : : : ┃ */ // note the following is not legal here: +/* : : : ┃ */ // "can only break with a value inside `loop` or breakable block" +/* : : : ┃ */ // break __incr_cov(from!("")); +/* : : : ┃ */ __incr_cov(from!("block start")); +/* : : ┏-----< */ break; +/* : : V : : */ +/* : : : : */ // FIXME(richkadel): add examples with loop labels, breaking out of inner and outer loop to outer loop label, with expression. +/* : : : : */ // May want to record both the span and the start position after the broken out block depdnding on label +/* : : ┃ - */ } +/* : : ┃ */ println!("after `break` test"); +/* : : ┃ - */ if { let __result = condition2; __incr_cov(from!("block end of `if index == 8`")); __result } { +/* ┏-:---:---:---< */ return { let __result = Ok(()); __incr_cov(from!("block start")); __result }; +/* V : : ┃ - */ } +/* : : ┃ */ +/* : : ┃ */ // BECAUSE THE PREVIOUS COVERAGE REGION HAS A `return`, THEN +/* : : ┃ */ // IF PREVIOUS COVERAGE REGION IS NOT COUNTED THEN OUTER REGION REACHED HERE. +/* : : ┃ */ // ADD A COVERAGE REGION FOR THE SPAN FROM JUST AFTER PREVIOUS REGION TO END +/* : : ┃ */ // OF OUTER SPAN, THEN TRUNCATE TO NEXT REGION NOT REACHED. 
+/* : : ┃ - */ if index % 3 == 2 { // NO __incr_cov() HERE BECAUSE NO STATEMENTS BETWEEN LAST CONDITIONAL BLOCK AND START OF THIS ONE +/* : : Λ : ┃ */ __incr_cov(from!("block end of `if condition2`")); +/* : : ┗-----< */ continue; +/* : : ┃ - */ } +/* : : ┃ */ println!("after `continue` test"); +/* : : ┃ */ // maybe add a runtime flag for a possible `return` here? +/* : : ┃ */ __incr_cov(from!("")); +/* : ┃ - */ } +/* : ┃ */ println!("after for loop"); +/* : ┃ */ let result = if { // START OF NEW CONDITIONAL EXPRESSION. NEXT "GUARANTEED" COUNTER SHOULD COUNT FROM END OF LAST CONDITIONAL EXPRESSION +/* : ┃ */ // A "GUARANTEED" COUNTER CALL IS ONE THAT WILL BE CALLED REGARDLESS OF OTHER CONDITIONS. THIS INCLUDES: +/* : ┃ */ // * A CONDITIONAL EXPRESSION THAT IS NOT A BLOCK (OR ANOTHER CONDITIONAL STATEMENT, WHICH WOULD CONTAIN A BLOCK) +/* : ┃ */ // * OR IF THE NEXT CONDITIONAL EXPRESSION IS A BLOCK OR CONDITIONAL STATEMENT, THEN THE FIRST "GUARANTEED" COUNTER IN THAT BLOCK +/* : ┃ */ // * END OF BLOCK IF THE BLOCK DOES NOT HAVE INNER CONDITIONAL EXPRESSIONS +/* : ┃ */ // * BRANCHING STATEMENTS (`return`, `break`, `continue`) BY EITHER WRAPPING THE BRANCH STATEMENT NON-BLOCK EXPRESSION, +/* : ┃ */ // OR PREPENDING A COUNTER WITH EMPTY TUPLE IF NO EXPRESSION, OR IF EXPRESSION IS A BLOCK, THEN THE NEXT "GUARANTEED" +/* : ┃ */ // COUNTER CALL WITHIN THAT BLOCK. +/* : ┃ */ // BASICALLY, CARRY THE START OF COVERAGE SPAN FORWARD UNTIL THE GUARANTEED COUNTER IS FOUND +/* : ┃ */ println!("after result = if ..."); +/* : ┃ - */ if { let __result = condition2; __incr_cov(from!("block end of `for` loop")); __result } { +/* : : ┃ */ println!("before first return"); +/* ┏-:---:-------< */ return { let __result = Ok(()); __incr_cov(from!("block start")); __result }; +/* V : : - */ } else if { let __result = condition3; __incr_cov(from!("`else`")); __result } { +/* : : ┃ */ // THE ABOVE COUNTER IS _NOT_ REALLY NECESSARY IF EXPRESSION IS GUARANTEED TO EXECUTE. +/* : : ┃ */ // IF WE GET COUNTER IN `else if` BLOCK WE COVERED EXPRESSION. +/* : : ┃ */ // IF WE GET TO ANY REMAINING `else` or `else if` BLOCK WE KNOW WE EVALUATED THIS CONDITION +/* : : ┃ */ // AND ALL OTHERS UP TO THE EXECUTED BLOCK. BUT THE SPAN WOULD HAVE "HOLES" FOR UNEXECUTED BLOCKS. 
+/* : : ┃ */ println!("not second return"); +/* ┏-:---:-------< */ return { let __result = Ok(()); __incr_cov(from!("block start")); __result }; +/* V : : - */ } else { +/* : : ┃ */ println!("not returning"); +/* : : ┃ */ { let __result = false; __incr_cov(from!("block start")); __result } +/* : : - */ } +/* : ┃ */ // NO COUNTER HERE BECAUSE NO STATEMENTS AFTER CONDITIONAL BLOCK +/* : ┃ - */ } { +/* : : ┃ */ println!("branched condition returned true"); +/* : : ┃ */ { let __result = Ok(()); __incr_cov(from!("")); __result } +/* : ┃ - */ } else if self.call_closure( +/* : : - */ |closure_param| { +/* : : ┃ - */ let __result = if condition3 { +/* : : : ┃ */ println!("in closure, captured condition said to print the param {}", closure_param); +/* : : : ┃ */ { let __result = false; __incr_cov(from!("")); __result } +/* : : ┃ - */ } else { +/* : : : ┃ */ println!("in closure, captured condition was false"); +/* : : : ┃ */ { let __result = true; __incr_cov(from!("")); __result } +/* : : ┃ - */ }; +/* : : - */ __incr_cov(from!("")); __result } +/* : : - */ ) { +/* : : ┃ */ println!("closure returned true"); +/* : : ┃ */ { let __result = Err(Error::new(ErrorKind::Other, "Result is error if closure returned true")); __incr_cov(from!("")); __result } +/* : ┃ - */ } else { +/* : : ┃ */ println!("closure returned false"); +/* : : ┃ */ { let __result = Err(Error::new(ErrorKind::Other, "Result is error if closure returned false")); __incr_cov(from!("")); __result } +/* : ┃ - */ }; +/* : ┃ */ println!("bottom of function might be skipped if early `return`"); +/* : ┃ */ { let __result = result; __incr_cov(from!("if condition1")); __result } +/* ┃ - */ } else { +/* : ┃ */ println!("skipping everything in `various()`"); +/* : ┃ */ { let __result = Ok(()); __incr_cov(from!("")); __result } +/* ┃ - */ } +/* ┃ - */ // __incr_cov(from!(""),0) // DO NOT COUNT IF NO STATEMENTS AFTER CONDITIONAL BLOCK. 
ALL COVERAGE IS ALREADY COUNTED +/* - */ } +/* */ } +/* */ +/* - */ fn main() -> Result<(), std::io::Error> { +/* ┃ */ //let mut status: u8 = 2; +/* ┃ */ let mut status: u8 = 1; +/* : - */ let result = if status < 2 && +/* : ┃ */ { let __result = { +/* : ┃ */ status -= 1; +/* : ┃ */ status == 0 +/* : - - */ }; __incr_cov(from!("")); __result } { +/* : ┃ */ let test_struct = TestStruct::new_with_value(100); +/* : ┃ */ let _ = test_struct.various(); +/* ┏-:---< */ return { let __result = Err(Error::new(ErrorKind::Other, format!("Error status {}", status))); __incr_cov(from!("")); __report(); __result } +/* V : - */ } else { +/* : ┃ */ let test_struct = TestStruct::new(); +/* : ┃ */ { let __result = test_struct.various(); __incr_cov(from!("")); __result } +/* : - */ }; +/* ┃ */ println!("done"); +/* ┃ */ { let __result = result; __incr_cov(from!("")); __report(); __result } +/* - */ } \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/coverage_injection_test_alt.rs b/src/test/codegen/coverage-experiments/src/coverage_injection_test_alt.rs new file mode 100644 index 0000000000000..20c4835dd882e --- /dev/null +++ b/src/test/codegen/coverage-experiments/src/coverage_injection_test_alt.rs @@ -0,0 +1,362 @@ +/* */ use std::io::Error; +/* */ use std::io::ErrorKind; +/* */ +/* */ /// Align Rust counter increment with with: +/* */ /// [‘llvm.instrprof.increment’ Intrinsic](https://llvm.org/docs/LangRef.html#llvm-instrprof-increment-intrinsic) +/* */ /// +/* */ /// declare void @llvm.instrprof.increment(i8* , i64 , i32 , i32 ) +/* */ /// +/* */ /// The first argument is a pointer to a global variable containing the name of the entity +/* */ /// being instrumented. This should generally be the (mangled) function name for a set of +/* */ /// counters. +/* */ /// +/* */ /// The second argument is a hash value that can be used by the consumer of the profile data +/* */ /// to detect changes to the instrumented source, and the third is the number of counters +/* */ /// associated with name. It is an error if hash or num-counters differ between two +/* */ /// instances of instrprof.increment that refer to the same name. +/* */ /// +/* */ /// The last argument refers to which of the counters for name should be incremented. It +/* */ /// should be a value between 0 and num-counters. +/* */ /// +/* */ /// # Arguments +/* */ /// +/* */ /// `mangled_fn_name` - &'static ref to computed and injected static str, using: +/* */ /// +/* */ /// ``` +/* */ /// fn rustc_symbol_mangling::compute_symbol_name( +/* */ /// tcx: TyCtxt<'tcx>, +/* */ /// instance: Instance<'tcx>, +/* */ /// compute_instantiating_crate: impl FnOnce() -> CrateNum, +/* */ /// ) -> String +/* */ /// ``` +/* */ /// +/* */ /// `source_version_hash` - Compute hash based that only changes if there are "significant" +/* */ /// to control-flow inside the function. +/* */ /// +/* */ /// `num_counters` - The total number of counter calls [MAX(counter_index) + 1] within the +/* */ /// function. +/* */ /// +/* */ /// `counter_index` - zero-based counter index scoped by the function. (Ordering of +/* */ /// counters, relative to the source code location, is apparently not expected.) +/* */ /// +/* */ /// # Notes +/* */ /// +/* */ /// * The mangled_fn_name may not be computable until generics are monomorphized (see +/* */ /// parameters required by rustc_symbol_mangling::compute_symbol_name). +/* */ /// * The version hash may be computable from AST analysis, and may not benefit from further +/* */ /// lowering. 
+/* */ /// * num_counters depends on having already identified all counter insertion locations. +/* */ /// * counter_index can be computed at time of counter insertion (incrementally). +/* */ /// * Numeric parameters are signed to match the llvm increment intrinsic parameter types. +/* */ fn __lower_incr_cov(_mangled_fn_name: &'static str, _fn_version_hash: i64, _num_counters: i32, _counter_index: i32) { +/* */ } +/* */ +/* */ /// A coverage counter implementation that will work as both an intermediate coverage +/* */ /// counting and reporting implementation at the AST-level only--for debugging and +/* */ /// development--but also serves as a "marker" to be replaced by calls to LLVM +/* */ /// intrinsic coverage counter APIs during the lowering process. +/* */ /// +/* */ /// Calls to this function will be injected automatically into the AST. When LLVM intrinsics +/* */ /// are enabled, the counter function calls that were injected into the AST serve as +/* */ /// placeholders, to be replaced by an alternative, such as: +/* */ /// +/* */ /// * direct invocation of the `llvm.instrprof.increment()` intrinsic; or +/* */ /// * the `__lower_incr_cov()` function, defined above, that would invoke the +/* */ /// `llvm.instrprof.increment()` intrinsic; or +/* */ /// * a similar expression wrapper, with the additional parameters (as defined above +/* */ /// for `__lower_incr_cov()`, that invokes `llvm.instrprof.increment()` and returns the +/* */ /// result of the wrapped expression) +/* */ /// +/* */ /// The first two options would require replacing the inlined wrapper call with something +/* */ /// like: +/* */ /// +/* */ /// ``` +/* */ /// { let result = {expr}; __inlined_incr_cov(context, counter); result } +/* */ /// ``` +/* */ /// +/* */ /// But if the lowering process is already unwrapping the inlined call to `__incr_cov()`, then +/* */ /// it may be a perfect opportunity to replace the function with one of these more +/* */ /// direct methods. +/* */ /// +/* */ #[inline(always)] +/* */ pub fn __incr_cov(region_loc: &str, /*index: u32,*/) { +/* */ // Either call the intermediate non-llvm coverage counter API or +/* */ // replace the call to this function with the expanded `__lower_incr_cov()` call. +/* */ +/* */ // let _lock = increment_counter(counter); +/* */ println!("{}", region_loc); +/* */ } +/* */ +/* */ /// Write a report identifying each incremented counter and the number of times each counter +/* */ /// was incremented. +/* */ fn __report() { +/* */ println!("WRITE REPORT!"); +/* */ } +/* */ +/* */ /// Increment the counter after evaluating the wrapped expression (see `__incr_cov()`), then +/* */ /// write a report identifying each incremented counter and the number of times each counter +/* */ /// was incremented. +/* */ #[inline(always)] +/* */ pub fn __incr_cov_and_report(region_loc: &str, /*counter: u32,*/ result: T) -> T { +/* */ __incr_cov(region_loc, /*counter,*/); +/* */ __report(); +/* */ result +/* */ } +/* */ +/* */ macro_rules! from { +/* */ ($from:expr) => { &format!("from: {}\n to: {}:{}:{}", $from, file!(), line!(), column!()) }; +/* */ } +/* */ +/* */ macro_rules! to { +/* */ ($to:expr) => { &format!("to: {}\n to: {}:{}:{}", $to, file!(), line!(), column!()) }; +/* */ } +/* */ +/* */ #[derive(Debug)] +/* */ enum TestEnum { +/* */ Red, +/* */ Green, +/* */ Blue, +/* */ } +/* */ +/* */ struct TestStruct { +/* */ field: i32, +/* */ } +/* */ +/* */ // IMPORTANT! IS WRAPPING main() ENOUGH? 
OR DO I ALSO NEED TO WRAP THREAD FUNCTIONS, ASSUMING +/* */ // THEY ARE STILL RUNNING WITH MAIN EXITS? (IF THEY CAN). NOT SURE HOW RUST HANDLES THAT. +/* */ +/* */ // I SUSPECT USING THREAD_LOCAL COUNTERS MAY NOT ACTUALLY BE AN OPTIMIZATION OVER MUTEX LOCKS, +/* */ // BUT MAYBE I SHOULD ASK. +/* */ +/* */ impl TestStruct { +/* - */ fn new() -> Self { +/* ┃ */ __incr_cov(to!("end of fn new()")); // function-scoped counter index = 0 +/* ┃ */ Self::new_with_value(31415) +/* - */ } +/* */ +/* - */ fn new_with_value(field: i32) -> Self { +/* ┃ */ __incr_cov(to!("end of fn new_with_value()")); // function-scoped counter index = 0 +/* ┃ */ Self { +/* ┃ */ field, +/* ┃ */ } +/* - */ } +/* */ +/* */ fn call_closure(&self, closure: F) -> bool +/* */ where +/* */ F: FnOnce( +/* */ i32, +/* */ ) -> bool, +/* - */ { +/* ┃ */ __incr_cov(to!("end of fn call_closure()")); // function-scoped counter index = 0 +/* ┃ */ closure(123) +/* - */ } +/* */ +/* - */ fn various(&self) -> Result<(),Error> { +/* ┃ */ __incr_cov(to!("just before next branch: after `match color`: pattern selection")); +/* ┃ */ use TestEnum::*; +/* ┃ */ let mut color = Red; +/* ┃ */ let _ = color; +/* ┃ */ color = Blue; +/* ┃ */ let _ = color; +/* ┃ */ color = Green; +/* ┃ */ match color { // function-scoped counter index = 0 +/* : */ +/* : */ // !!! RECORD SPAN FROM START OF INNERMOST CONTAINING BLOCK (THE FUNCTION IN THIS CASE) TO END OF MATCH EXPRESSION +/* : */ // If `match`, `while`, `loop`, `for`, `if`, etc. expression has a `return`, `break`, or `continue` +/* : */ // (if legal), then RECORD SPAN FROM START OF INNERMOST CONTAINING BLOCK TO END OF `return` EXPRESSION +/* : */ // If the expression includes lazy booleans, nest calls to `__incr_cov()`. +/* : - */ Red => { +/* : ┃ */ __incr_cov(to!("end of matched Red")); +/* : ┃ */ println!("roses"); +/* : - */ } +/* : - */ Green => { +/* : ┃ */ __incr_cov(to!("just before next branch: after `if spidey > goblin`")); +/* : ┃ */ let spidey = 100; +/* : ┃ */ let goblin = 50; +/* : ┃ */ // if spidey > goblin {__incr_cov(from!(""),{ +/* : ┃ */ // println!("what ev"); +/* : ┃ */ // })} +/* : ┃ */ // ACTUALLY, WRAPPING THE ENTIRE IF BLOCK IN `__incr_cov` IS NOT A GREAT GENERAL RULE. +/* : ┃ */ // JUST INSERTING A `return`, `break`, or `continue` IN THAT BLOCK (without an intermediate condition) +/* : ┃ */ // MAKES THE `__incr_cov()` CALL UNREACHABLE! +/* : ┃ */ // MY ORIGINAL SOLUTION WORKS BETTER (WRAP LAST EXPRESSION OR AFTER LAST SEMICOLON STATEMENT IN BLOCK) +/* : ┃ */ // UNLESS THE EXPRESSION IS NOT A BLOCK. +/* : ┃ - */ if spidey > goblin { +/* : : ┃ */ __incr_cov(to!("end of if block, if no earlier branch in this scope")); +/* : : ┃ */ println!("spidey beats goblin"); +/* : : ┃ */ +/* : ┃ - */ } else if { +/* : : : */ // Make sure we can't compute the coverage count here. +/* : : : */ // We know the expression executed if the previous if block DID NOT +/* : : : */ // execute, and either this `else if` block does execute OR any subsequent +/* : : : */ // `else if` or `else` blocks execute, OR none of the blocks in the +/* : : : */ // `if`, `else if` or `else` blocks execute. +/* : : : */ // `if`, `else if` or `else` blocks execute. 
+/* : : ┃ */ __incr_cov(to!("end of `else if spidey == goblin` expression")); +/* : : ┃ */ spidey == goblin +/* : ┃ - */ } { +/* : : ┃ */ __incr_cov(to!("end of if block, if no earlier branch in this scope")); +/* : : ┃ */ // COVERAGE NOTE: Do we mark only the expression span (that may be trivial, as in this case), +/* : : ┃ */ // or associate it with the outer block, similar to how the `if` expression is associated with +/* : : ┃ */ // the outer block? (Although it is a continuation, in a sense, it is discontiguous in this case, +/* : : ┃ */ // so I think simpler to just make it its own coverage region.) +/* : : ┃ */ println!("it's a draw"); +/* : : ┃ */ +/* : ┃ - - - */ } else if { +/* : : ┃ */ __incr_cov(to!("end of `if true`")); +/* : ┃ - - - */ if true { +/* : : : ┃ */ __incr_cov(to!("end of `return Ok(())`")); +/* ┏-:---:-------:---< */ return Ok(()); +/* V : : ┃ - */ } else { +/* : : : ┃ */ // __incr_cov(to!("end of else block")); +/* : : : ┃ */ // computed counter expression +/* : : : ┃ */ false +/* : : : - */ } +/* : : - - - */ } { +/* : : ┃ */ __incr_cov(to!("end of if block")); +/* : : ┃ */ println!("wierd science"); +/* : ┃ - */ } else { +/* : : ┃ */ // __incr_cov(to!("end of `return Ok(())")); +/* : : ┃ */ // counter expression: (start of Green match arm) - (if spidey > goblin) - (previous `} else if {`) +/* : : ┃ */ println!("goblin wins"); +/* ┏-:---:---< */ return Ok(()); // THIS COUNTS LAST STATEMENT IN `else` BLOCK +/* V : : : */ // COVERAGE NOTE: When counting the span for `return`, +/* : : : */ // `break`, or `continue`, also report the outer spans +/* : : : */ // got this far--including this `else` block. Record +/* : : : */ // The start positions for those outer blocks, but: +/* : : : */ // * For the block containing the `return`, `break`, or +/* : : : */ // `continue`, end report the end position is the +/* : : : */ // start of the `return` span (or 1 char before it). +/* : : : */ // * Anything else? 
+/* : ┃ - */ } +/* : : */ // __incr_cov(to!("end of matched Green")); +/* : : */ // // DO NOT COUNT HERE IF NO STATEMENTS AFTER LAST `if` or `match` +/* : - */ }, +/* : - */ Blue => { +/* : ┃ */ __incr_cov(to!("end of matched Blue")); +/* : ┃ */ println!("violets"); +/* : - */ } +/* ┃ */ } +/* ┃ */ __incr_cov(to!("just before next branch: after `if condition1` (HIR: 'match condition1')")); +/* ┃ */ +/* ┃ */ let condition1 = true; +/* ┃ */ let condition2 = false; +/* ┃ */ let condition3 = true; +/* ┃ */ +/* ┃ */ println!("Called `various()` for TestStruct with field={}", self.field); +/* ┃ */ +/* ┃ - */ if condition1 { +/* : ┃ */ println!("before while loop"); +/* : ┃ */ let mut countdown = 10; +/* : ┃ */ // Must increment before repeated while text expression +/* : : I */ while countdown > 0 { // span is just the while test expression +/* : : ┃ */ println!("top of `while` loop"); +/* : : ┃ */ countdown -= 1; +/* : : ┃ */ // // Counter not needed, but span is computed as "while test" minus "block start" +/* : : ┃ */ // If test expression is 11, and the outer block runs only once, 11-1 = 10 +/* : ┃ - */ } +/* : ┃ */ println!("before for loop"); +/* : ┃ - */ for index in 0..10 { +/* : : ┃ */ println!("top of `for` loop"); +/* : : ┃ - */ if index == 8 { +/* : : : ┃ */ println!("before break"); +/* : : : ┃ */ // note the following is not legal here: +/* : : : ┃ */ // "can only break with a value inside `loop` or breakable block" +/* : : : ┃ */ // break +/* : : : ┃ */ +/* : : ┏-----< */ break; +/* : : V : : */ +/* : : : : */ // FIXME(richkadel): add examples with loop labels, breaking out of inner and outer loop to outer loop label, with expression. +/* : : : : */ // May want to record both the span and the start position after the broken out block depdnding on label +/* : : ┃ - */ } +/* : : ┃ */ println!("after `break` test"); +/* : : ┃ - */ if condition2 { +/* ┏-:---:---:---< */ return Ok(()); +/* V : : ┃ - */ } +/* : : ┃ */ +/* : : ┃ */ // BECAUSE THE PREVIOUS COVERAGE REGION HAS A `return`, THEN +/* : : ┃ */ // IF PREVIOUS COVERAGE REGION IS NOT COUNTED THEN OUTER REGION REACHED HERE. +/* : : ┃ */ // ADD A COVERAGE REGION FOR THE SPAN FROM JUST AFTER PREVIOUS REGION TO END +/* : : ┃ */ // OF OUTER SPAN, THEN TRUNCATE TO NEXT REGION NOT REACHED. +/* : : ┃ - */ if index % 3 == 2 { // NO __incr_cov() HERE BECAUSE NO STATEMENTS BETWEEN LAST CONDITIONAL BLOCK AND START OF THIS ONE +/* : : Λ : ┃ */ +/* : : ┗-----< */ continue; +/* : : ┃ - */ } +/* : : ┃ */ println!("after `continue` test"); +/* : : ┃ */ // maybe add a runtime flag for a possible `return` here? +/* : : ┃ */ +/* : ┃ - */ } +/* : ┃ */ println!("after for loop"); +/* : ┃ */ let result = if { // START OF NEW CONDITIONAL EXPRESSION. NEXT "GUARANTEED" COUNTER SHOULD COUNT FROM END OF LAST CONDITIONAL EXPRESSION +/* : ┃ */ // A "GUARANTEED" COUNTER CALL IS ONE THAT WILL BE CALLED REGARDLESS OF OTHER CONDITIONS. 
THIS INCLUDES: +/* : ┃ */ // * A CONDITIONAL EXPRESSION THAT IS NOT A BLOCK (OR ANOTHER CONDITIONAL STATEMENT, WHICH WOULD CONTAIN A BLOCK) +/* : ┃ */ // * OR IF THE NEXT CONDITIONAL EXPRESSION IS A BLOCK OR CONDITIONAL STATEMENT, THEN THE FIRST "GUARANTEED" COUNTER IN THAT BLOCK +/* : ┃ */ // * END OF BLOCK IF THE BLOCK DOES NOT HAVE INNER CONDITIONAL EXPRESSIONS +/* : ┃ */ // * BRANCHING STATEMENTS (`return`, `break`, `continue`) BY EITHER WRAPPING THE BRANCH STATEMENT NON-BLOCK EXPRESSION, +/* : ┃ */ // OR PREPENDING A COUNTER WITH EMPTY TUPLE IF NO EXPRESSION, OR IF EXPRESSION IS A BLOCK, THEN THE NEXT "GUARANTEED" +/* : ┃ */ // COUNTER CALL WITHIN THAT BLOCK. +/* : ┃ */ // BASICALLY, CARRY THE START OF COVERAGE SPAN FORWARD UNTIL THE GUARANTEED COUNTER IS FOUND +/* : ┃ */ println!("after result = if ..."); +/* : ┃ - */ if condition2 { +/* : : ┃ */ println!("before first return"); +/* ┏-:---:-------< */ return Ok(()); +/* V : : - */ } else if condition3 { +/* : : ┃ */ // THE ABOVE COUNTER IS _NOT_ REALLY NECESSARY IF EXPRESSION IS GUARANTEED TO EXECUTE. +/* : : ┃ */ // IF WE GET COUNTER IN `else if` BLOCK WE COVERED EXPRESSION. +/* : : ┃ */ // IF WE GET TO ANY REMAINING `else` or `else if` BLOCK WE KNOW WE EVALUATED THIS CONDITION +/* : : ┃ */ // AND ALL OTHERS UP TO THE EXECUTED BLOCK. BUT THE SPAN WOULD HAVE "HOLES" FOR UNEXECUTED BLOCKS. +/* : : ┃ */ println!("not second return"); +/* ┏-:---:-------< */ return Ok(()); +/* V : : - */ } else { +/* : : ┃ */ println!("not returning"); +/* : : ┃ */ false +/* : : - */ } +/* : ┃ */ // NO COUNTER HERE BECAUSE NO STATEMENTS AFTER CONDITIONAL BLOCK +/* : ┃ - */ } { +/* : : ┃ */ println!("branched condition returned true"); +/* : : ┃ */ Ok(()) +/* : ┃ - */ } else if self.call_closure( +/* : : - */ |closure_param| +/* : : ┃ - */ if condition3 { +/* : : : ┃ */ println!("in closure, captured condition said to print the param {}", closure_param); +/* : : : ┃ */ false +/* : : ┃ - */ } else { +/* : : : ┃ */ println!("in closure, captured condition was false"); +/* : : : ┃ */ true +/* : : ┃ - */ } +/* : : - */ +/* : : - */ ) { +/* : : ┃ */ println!("closure returned true"); +/* : : ┃ */ Err(Error::new(ErrorKind::Other, "Result is error if closure returned true")) +/* : ┃ - */ } else { +/* : : ┃ */ println!("closure returned false"); +/* : : ┃ */ Err(Error::new(ErrorKind::Other, "Result is error if closure returned false")) +/* : ┃ - */ }; +/* : ┃ */ println!("bottom of function might be skipped if early `return`"); +/* : ┃ */ result +/* ┃ - */ } else { +/* : ┃ */ println!("skipping everything in `various()`"); +/* : ┃ */ Ok(()) +/* ┃ - */ } +/* ┃ - */ // 0 // DO NOT COUNT IF NO STATEMENTS AFTER CONDITIONAL BLOCK. 
ALL COVERAGE IS ALREADY COUNTED +/* - */ } +/* */ } +/* */ +/* - */ fn main() -> Result<(), std::io::Error> { +/* ┃ */ //let mut status: u8 = 2; +/* ┃ */ let mut status: u8 = 1; +/* : - */ let result = if status < 2 && +/* : ┃ */ { +/* : ┃ */ status -= 1; +/* : ┃ */ status == 0 +/* : - - */ } { +/* : ┃ */ let test_struct = TestStruct::new_with_value(100); +/* : ┃ */ let _ = test_struct.various(); +/* ┏-:---< */ return __incr_cov_and_report(from!(""),Err(Error::new(ErrorKind::Other, format!("Error status {}", status)))) +/* V : - */ } else { +/* : ┃ */ let test_struct = TestStruct::new(); +/* : ┃ */ test_struct.various() +/* : - */ }; +/* ┃ */ println!("done"); +/* ┃ */ __incr_cov_and_report(from!(""),result) // function-scoped counter index = 0 +/* - */ } \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/drop_trait.rs b/src/test/codegen/coverage-experiments/src/drop_trait.rs new file mode 100644 index 0000000000000..75400e037e9f0 --- /dev/null +++ b/src/test/codegen/coverage-experiments/src/drop_trait.rs @@ -0,0 +1,25 @@ +#[inline(always)] +pub fn __incr_cov(_region_loc: &str, result: T) -> T { + result +} + +struct Firework { + _strength: i32, +} + +impl Drop for Firework { + fn drop(&mut self) { + __incr_cov("start of drop()", ()); + } +} + +fn main() -> Result<(),u8> { + let _firecracker = Firework { _strength: 1 }; + + if __incr_cov("start of main()", true) { + return __incr_cov("if true", { let _t = Err(1); _t }); + } + + let _tnt = Firework { _strength: 100 }; + Ok(()) +} \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/drop_trait_with_comments_prints.rs b/src/test/codegen/coverage-experiments/src/drop_trait_with_comments_prints.rs new file mode 100644 index 0000000000000..de9f5d5cb4647 --- /dev/null +++ b/src/test/codegen/coverage-experiments/src/drop_trait_with_comments_prints.rs @@ -0,0 +1,53 @@ +// +// +// +// It's interesting to speculate if there is a way to leverage the Drop trait functionality +// to increment counters when a scope is closed, but I don't think it would help "out of the box". +// +// A `return` or `break` with expression might not need a temp value expression wrapper +// such as `return { let _t = result_expression; __incr_counter(...); _t };` +// +// ... **if** the __incr_counter() was somehow called from a "drop()" trait function. +// +// The problem is, since the drop call is automatic, there is no way to have argument variants +// depending on where the drop() occurs (e.g., from a `return` statement vs. from the end of +// the function). We need 2 different code regions though. +// +// +// +// + +#[inline(always)] +pub fn __incr_cov(_region_loc: &str, /*index: u32,*/ result: T) -> T { + // println!("from: {}", _region_loc); + result +} + +struct Firework { + strength: i32, +} + +impl Drop for Firework { + fn drop(&mut self) { + println!("BOOM times {}!!!", self.strength); + __incr_cov("start of drop()", ()); + } +} + +fn main() -> Result<(),u8> { + let _firecracker = Firework { strength: 1 }; + + if __incr_cov("start of main()", true) { + return __incr_cov("if true", { let _t = Err(1); println!("computing return value"); _t }); + } + + let _tnt = Firework { strength: 100 }; + // __incr_cov("after if block", Ok(())) // CAN USE COUNTER EXPRESSION: "start of drop()" - "if true" + Ok(()) +} + +// OUTPUT WHEN RUNNING THIS PROGRAM IS AS EXPECTED: + +// computing return value +// BOOM times 1!!! 
+// Error: 1 diff --git a/src/test/codegen/coverage-experiments/src/for.rs b/src/test/codegen/coverage-experiments/src/for.rs new file mode 100644 index 0000000000000..3f44c382a1e3f --- /dev/null +++ b/src/test/codegen/coverage-experiments/src/for.rs @@ -0,0 +1,41 @@ +#[inline(always)] +pub fn __incr_cov(_region_loc: &str, /*index: u32,*/ result: T) -> T { + result +} + +fn main() { + for countdown in __incr_cov("start", 10..0) { + let _ = countdown; + __incr_cov("top of for", ()); + } +} + +// LOWERED TO HIR: +// +// fn main() { +// { +// let _t = +// match ::std::iter::IntoIterator::into_iter(__incr_cov("start", +// ::std::ops::Range{start: +// 10, +// end: +// 0,})) +// { +// mut iter => +// loop { +// let mut __next; +// match ::std::iter::Iterator::next(&mut iter) { +// ::std::option::Option::Some(val) => +// __next = val, +// ::std::option::Option::None => break , +// } +// let countdown = __next; +// { +// let _ = countdown; +// __incr_cov("top of for", ()); +// } +// }, +// }; +// _t +// } +// } \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/for_with_comments.rs b/src/test/codegen/coverage-experiments/src/for_with_comments.rs new file mode 100644 index 0000000000000..03d11b2c230ca --- /dev/null +++ b/src/test/codegen/coverage-experiments/src/for_with_comments.rs @@ -0,0 +1,24 @@ +/* */ #[inline(always)] +/* */ pub fn __incr_cov(_region_loc: &str, /*index: u32,*/ result: T) -> T { +/* */ result +/* */ } +/* */ +/* - */ fn main() { +/* : I */ for countdown in __incr_cov("start", 10..0) { // span is just the while test expression +/* : ┃ */ let _ = countdown; +/* : ┃ */ __incr_cov("top of for", ()); +/* ┃ - */ } +/* - */ } + + +// -Z unpretty=val -- present the input source, unstable (and less-pretty) variants; +// valid types are any of the types for `--pretty`, as well as: +// `expanded`, `expanded,identified`, +// `expanded,hygiene` (with internal representations), +// `everybody_loops` (all function bodies replaced with `loop {}`), +// `hir` (the HIR), `hir,identified`, +// `hir,typed` (HIR with types for each node), +// `hir-tree` (dump the raw HIR), +// `mir` (the MIR), or `mir-cfg` (graphviz formatted MIR) + +// argument to `pretty` must be one of `normal`, `expanded`, `identified`, or `expanded,identified` diff --git a/src/test/codegen/coverage-experiments/src/if.rs b/src/test/codegen/coverage-experiments/src/if.rs new file mode 100644 index 0000000000000..ad50f6be19004 --- /dev/null +++ b/src/test/codegen/coverage-experiments/src/if.rs @@ -0,0 +1,80 @@ +#![feature(core_intrinsics)] + +pub fn __llvm_incr_counter(_region_loc: &str) { +} + +#[inline(always)] +pub fn __incr_cov(region_loc: &str, result: T) -> T { + __llvm_incr_counter(region_loc); + result +} + +static TEST_FUNC_NAME: &'static [u8; 6] = b"main()"; + +fn main() { + let mut countdown = 10; + if __incr_cov("start", countdown > 0) { + + + // // TEST CALLING INTRINSIC: + unsafe { core::intrinsics::instrprof_increment(TEST_FUNC_NAME as *const u8, 1234 as u64, 314 as u32, 31 as u32) }; + // // Results in: + // // LLVM ERROR: Cannot select: intrinsic %llvm.instrprof.increment + // // I may need to pass one or more of the following flags (or equivalent opts) to LLVM to enable this: + // // -fprofile-instr-generate -fcoverage-mapping + + + countdown -= 1; + __incr_cov("if block",()); + } else if countdown > 5 { + countdown -= 2; + __incr_cov("else if block",()); + } else { + countdown -= 3; + } + + let mut countdown = 10; + if { let _tcov = countdown > 0; __llvm_incr_counter("start", ); 
_tcov } { + countdown -= 1; + __incr_cov("if block",()); + } else if countdown > 5 { + countdown -= 2; + __incr_cov("else if block",()); + } else { + countdown -= 3; + } +} + +// NOTE: hir REDUNDANTLY lowers the manually inlined counter in the second if block to: +// +// match { +// let _t = +// { +// let _tcov = countdown > 0; +// __llvm_incr_counter("start"); +// _tcov +// }; +// _t +// } { + +// I don't know if optimization phases will fix this or not. +// Otherwise, a more optimal (but definitely special case) way to handle this would be +// to inject the counter between the hir-introduced temp `_t` assignment and the block result +// line returning `_t`: +// +// match { +// let _t = countdown > 0; +// __llvm_incr_counter("start"); // <-- the only thing inserted for coverage here +// _t +// } +// +// UNFORTUNATELY THIS IS NOT A PATTERN WE CAN ALWAYS LEVERAGE, FOR EXPRESSIONS THAT HAVE VALUES +// WHERE WE NEED TO INJECT THE COUNTER AFTER THE EXPRESSION BUT BEFORE IT IS USED. +// +// IT DOES APPEAR TO BE THE CASE FOR WHILE EXPRESSIONS, (BECOMES loop { match { let _t = condition; _t} { true => {...} _ => break, }}) +// AND IS TRUE FOR IF EXPRESSIONS AS NOTED +// BUT NOT FOR RETURN STATEMENT (and I'm guessing not for loop { break value; } ? ) +// +// AND NOT FOR LAZY BOOLEAN EXPRESSIONS! +// +// AND NOT FOR MATCH EXPRESSIONS IN THE ORIGINAL SOURCE! \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/if_with_comments.rs b/src/test/codegen/coverage-experiments/src/if_with_comments.rs new file mode 100644 index 0000000000000..267e7bca2c5a2 --- /dev/null +++ b/src/test/codegen/coverage-experiments/src/if_with_comments.rs @@ -0,0 +1,39 @@ +/* */ #[inline(always)] +/* */ pub fn __incr_cov(_region_loc: &str, /*index: u32,*/ result: T) -> T { +/* */ result +/* */ } +/* */ +/* - */ fn main() { +/* ┃ */ let mut countdown = 10; +/* : I */ if __incr_cov("start", countdown > 0) { // span is from start of main() +/* : ┃ */ countdown -= 1; +/* : ┃ */ __incr_cov("if block",()); +/* ┃ - */ } + + let mut countdown = 10; + if __incr_cov("start", countdown > 0) { + countdown -= 1; + __incr_cov("if block",()); + } else if countdown > 5 { // counter expression "start" - "if block" + countdown -= 2; + __incr_cov("else if block",()); + } else { + countdown -= 3; + // __incr_cov("else block",()); // counter expression (countdown > 5 counter expression) - "else if block" + // PLACED AT END OF ELSE BLOCK OR START OF FIRST CONDITIONAL BLOCK, IF ANY (PRESUMING POSSIBLE EARLY EXIT). + // IF WE CAN GUARANTEE NO EARLY EXIT IN THIS BLOCK, THEN AT THE END IS FINE EVEN IF ELSE BLOCK CONTAINS OTHER CONDITIONS. 
+ } + +/* - */ } + +// -Z unpretty=val -- present the input source, unstable (and less-pretty) variants; +// valid types are any of the types for `--pretty`, as well as: +// `expanded`, `expanded,identified`, +// `expanded,hygiene` (with internal representations), +// `everybody_loops` (all function bodies replaced with `loop {}`), +// `hir` (the HIR), `hir,identified`, +// `hir,typed` (HIR with types for each node), +// `hir-tree` (dump the raw HIR), +// `mir` (the MIR), or `mir-cfg` (graphviz formatted MIR) + +// argument to `pretty` must be one of `normal`, `expanded`, `identified`, or `expanded,identified` diff --git a/src/test/codegen/coverage-experiments/src/increment_intrinsic.rs b/src/test/codegen/coverage-experiments/src/increment_intrinsic.rs new file mode 100644 index 0000000000000..d4708cd367ff6 --- /dev/null +++ b/src/test/codegen/coverage-experiments/src/increment_intrinsic.rs @@ -0,0 +1,11 @@ +#![feature(core_intrinsics)] + +pub fn not_instrprof_increment(_hash: u64, _num_counters: u32, _index: u32) { +} + +fn main() { + // COMPARE THIS WITH INTRINSIC INSERTION + //not_instrprof_increment(1234 as u64, 314 as u32, 31 as u32); + + unsafe { core::intrinsics::instrprof_increment(1234 as u64, 314 as u32, 31 as u32) }; +} \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/just_main.rs b/src/test/codegen/coverage-experiments/src/just_main.rs new file mode 100644 index 0000000000000..081e5d72a6e0a --- /dev/null +++ b/src/test/codegen/coverage-experiments/src/just_main.rs @@ -0,0 +1,3 @@ +fn main() { + println!("hello world! (should be covered)"); +} diff --git a/src/test/codegen/coverage-experiments/src/lazy_boolean.rs b/src/test/codegen/coverage-experiments/src/lazy_boolean.rs new file mode 100644 index 0000000000000..263277c7cdc4d --- /dev/null +++ b/src/test/codegen/coverage-experiments/src/lazy_boolean.rs @@ -0,0 +1,17 @@ +pub fn __llvm_incr_counter(_region_loc: &str) { +} + +#[inline(always)] +pub fn __incr_cov(region_loc: &str, result: T) -> T { + __llvm_incr_counter(region_loc); + result +} + +fn main() { + let a = 1; + let b = 10; + let c = 100; + let _result = __incr_cov("start", a < b) || __incr_cov("or", b < c); + + let _result = { let _t = a < b; __llvm_incr_counter("start"); _t } || { let _t = b < c; __llvm_incr_counter("start"); _t }; +} \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/loop_break_value.rs b/src/test/codegen/coverage-experiments/src/loop_break_value.rs new file mode 100644 index 0000000000000..76caa833ec4f8 --- /dev/null +++ b/src/test/codegen/coverage-experiments/src/loop_break_value.rs @@ -0,0 +1,15 @@ +pub fn __llvm_incr_counter(_region_loc: &str) { +} + +#[inline(always)] +pub fn __incr_cov(region_loc: &str, result: T) -> T { + __llvm_incr_counter(region_loc); + result +} + +fn main() { + __incr_cov("start", ()); + let _result = loop { + break __incr_cov("top of loop", true); + }; +} \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/match.rs b/src/test/codegen/coverage-experiments/src/match.rs new file mode 100644 index 0000000000000..afbb20888eab5 --- /dev/null +++ b/src/test/codegen/coverage-experiments/src/match.rs @@ -0,0 +1,22 @@ +pub fn __llvm_incr_counter(_region_loc: &str) { +} + +#[inline(always)] +pub fn __incr_cov(region_loc: &str, result: T) -> T { + __llvm_incr_counter(region_loc); + result +} + +fn main() { + let a = 1; + let b = 10; + let _result = match a < b { + true => true, + _ => false, + }; + + let _result = match __incr_cov("end 
of first match", a < b) { + true => __incr_cov("matched true", true), + _ => false, // counter expression "end of first match" - "matched true" + }; +} \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/match_with_increment.rs b/src/test/codegen/coverage-experiments/src/match_with_increment.rs new file mode 100644 index 0000000000000..f618b37ed5247 --- /dev/null +++ b/src/test/codegen/coverage-experiments/src/match_with_increment.rs @@ -0,0 +1,305 @@ +#![feature(core_intrinsics)] +//static TEST_FUNC_NAME: &'static [u8; 7] = b"main()\0"; + static TEST_FUNC_NAME: &'static [u8; 6] = b"main()"; +fn main() { + let a = 1; + let b = 10; + let _result = match { + let _t = a < b; + unsafe { core::intrinsics::instrprof_increment(TEST_FUNC_NAME as *const u8, 1234 as u64, 3 as u32, 0 as u32) }; + _t + } { + true => { + let _t = true; + unsafe { core::intrinsics::instrprof_increment(TEST_FUNC_NAME as *const u8, 1234 as u64, 3 as u32, 1 as u32) }; + _t + } + _ => false, + }; +} + +/* + +I NEED TO INSERT THE instrprof_increment() CALL: + + 1. JUST BEFORE THE switchInt(_4) (because we haven't counted entering the function main() yet, deferring that to "JUST BEFORE FIRST BRANCH") + 2. SOME TIME AFTER THE switchInt(_4), AND JUST BEFORE ANOTHER BRANCH (in this case, before "goto") + 2.a. NOT BEFORE BOTH GOTO'S AFTER switchInt(_4) (because one can be calculated by counter expression), BUT PERHAPS INSERT A noop PLACEHOLDER + AS A MARKER TO INCLUDE THE COVERAGE REGION AND REFERENCE THE COUNTERS TO BE SUBTRACTED (AND/OR SUMMED)? + + WHY DEFER INSERTING COUNTERS TO "JUST BEFORE FIRST BRANCH"? We can ignore panic/unwind() and only count if the coverage region ACTUALLY + executed in entirety. BUT IS THAT NECESSARY? IS IT MUCH EASIER TO INSERT COUNTERS AT THE TOP OF A REGION THAT MUST EXECUTE IN ENTIRETY IF + PANIC DOES NOT OCCUR? AND WHAT IF WE ADD SUPPORT FOR PANIC UNWIND (later)? + + IS THERE A BENEFIT OF THE DEFERRED APPROACH WHEN CONSIDERING EXPRESSIONS MAY HAVE EARLY RETURNS? (BECAUSE, WE STILL NEED TO COUNT THE REGION + LEADING UP TO THE EXPRESSION ANYWAY) + +================================================= +================================================= + +To inject an intrinsic after computing a final expression value of a coverage region: + +Replace the following basic block end (last statement plus terminator): + +... ... +StorageLive(_4) +StorageLive(_5) +_5 = _1 +StorageLive(_6) +_6 = _2 +_4 = Lt(move _5, move _6) +StorageDead(_6) +StorageDead(_5) + <------ to insert instrprof_increment() here +FakeRead(ForMatchedPlace, _4) +-------------------------------------------------------------------------------------- +switchInt(_4) + + +================================================= +Insert call to intrinsic with: + +StorageLive(_4) # _4 is now meant for deferred FakeRead(ForMatchdPlace, _4) in BasicBlock after increment() call +StorageLive(_5) # Unchanged except _4 is now _5 +StorageLive(_6) # Unchanged except _5 is now _6 +_6 = _1 # Unchanged except _5 is now _6 +StorageLive(_7) # Unchanged except _6 is now _7 +_7 = _2 # Unchanged except _6 is now _7 +_5 = Lt(move _6, move _7) # Unchanged except _4, _5, _6 is now _5, _6, _7 +StorageDead(_7) # Unchanged except _6 is now _7 +StorageDead(_6) # Unchanged except _5 is now _6 + +FakeRead(ForLet, _5) # CHANGED ForMatchedPlace to ForLet + +> # ALL NEW AND NECESSARY TO CALL instrprof_increment() +> StorageLive(_8) # ?? stores function pointer to instrprof_increment function? 
+> StorageLive(_9) +> StorageLive(_10) +> StorageLive(_11) +> _11 = const {alloc1+0: &&[u8; 6]} +> _10 = &raw const (*(*_11)) +> _9 = move _10 as *const u8 (Pointer(ArrayToPointer)) +> StorageDead(_10) +> StorageLive(_12) +> _12 = const 1234u64 +> StorageLive(_13) +> _13 = const 3u32 +> StorageLive(_14) +> _14 = const 0u32 +> -------------------------------------------------------------------------------------- +> _8 = const std::intrinsics::instrprof_increment(move _9, move _12, move _13, move _14) +> +> -> return +> +> StorageDead(_14) +> StorageDead(_13) +> StorageDead(_12) +> StorageDead(_9) +> StorageDead(_11) +> StorageDead(_8) + +_4 = _5 # ARE THESE LINES REDUNDANT? CAN I JUST PASS _5 DIRECTLY TO FakeRead()? +StorageDead(_5) # DROP "_t" temp result of `let _t = a < b` + # (NOTE THAT IF SO, I CAN REMOVE _5 altogether, and use _4, which coincidentally makes less changes) + # SEE BELOW + +FakeRead(ForMatchedPlace, _4) # Unchanged +-------------------------------------------------------------------------------------- +switchInt(_4) # Unchanged + + +================================================= +Can I skip the extra variable and insert call to intrinsic with: + +StorageLive(_4) # Unchanged +StorageLive(_5) # Unchanged +_5 = _1 # Unchanged +StorageLive(_6) # Unchanged +_6 = _2 # Unchanged +_4 = Lt(move _5, move _6) # Unchanged +StorageDead(_6) # Unchanged +StorageDead(_5) # Unchanged + +> # ALL NEW AND NECESSARY TO CALL instrprof_increment() +> FakeRead(ForLet, _4) # Save the post-increment result in temp "_t" +> StorageLive(_8) # ?? stores function pointer to instrprof_increment function? +> StorageLive(_9) +> StorageLive(_10) +> StorageLive(_11) +> _11 = const {alloc1+0: &&[u8; 6]} +> _10 = &raw const (*(*_11)) +> _9 = move _10 as *const u8 (Pointer(ArrayToPointer)) +> StorageDead(_10) +> StorageLive(_12) +> _12 = const 1234u64 +> StorageLive(_13) +> _13 = const 3u32 +> StorageLive(_14) +> _14 = const 0u32 +> -------------------------------------------------------------------------------------- +> _8 = const std::intrinsics::instrprof_increment(move _9, move _12, move _13, move _14) +> +> -> return +> +> StorageDead(_14) +> StorageDead(_13) +> StorageDead(_12) +> StorageDead(_9) +> StorageDead(_11) +> StorageDead(_8) + +FakeRead(ForMatchedPlace, _4) # Unchanged (PREVIOUSLY USED IN FakeRead(ForLet), is that OK?) +-------------------------------------------------------------------------------------- +switchInt(_4) # Unchanged + + + + + +================================================= +================================================= + +For the second inserted call to instrprof_increment, without that call we have: + +-------------------------------------------------------------------------------------- +switchInt(_4) # From above + +-> otherwise # that is, "NOT false" + +_3 = const true + <------ to insert instrprof_increment() here +-------------------------------------------------------------------------------------- +goto + +-> # No label. No condition, and not a "return" + +FakeRead(ForLet, _3) # NOTE: Unused result +StorageDead(_4) +_0 = () +StorageDead(_3) +StorageDead(_2) +StorageDead(_1) +-------------------------------------------------------------------------------------- +goto + +-> # No label. 
No condition, and not a "return" + +return # from main() + + +================================================= +With the call to increment(): + +-------------------------------------------------------------------------------------- +switchInt(_4) # From above + +-> otherwise # "NOT false" # UNCHANGED + +StorageLive(_15) # CHANGED! Allocated new storage (_15) for the result of match, if true. +_15 = const true # UNCHANGED except _3 is now _15 +FakeRead(ForLet, _15) # CHANGED! Assign value to temporary (to be assigned to _3 later) ... Do I need to do this? + +> # ALL NEW AND NECESSARY TO CALL instrprof_increment() +> StorageLive(_16) # pointer to instrprof_increment() function ? +> StorageLive(_17) +> StorageLive(_18) +> StorageLive(_19) +> _19 = const {alloc1+0: &&[u8; 6]} +> _18 = &raw const (*(*_19)) +> _17 = move _18 as *const u8 (Pointer(ArrayToPointer)) +> StorageDead(_18) +> StorageLive(_20) +> _20 = const 1234u64 +> StorageLive(_21) +> _21 = const 3u32 +> StorageLive(_22) +> _22 = const 1u32 +> -------------------------------------------------------------------------------------- +> _16 = const std::intrinsics::instrprof_increment(move _17, move _20, move _21, move _22) +> +> -> return +> +> StorageDead(_22) +> StorageDead(_21) +> StorageDead(_20) +> StorageDead(_17) +> StorageDead(_19) +> StorageDead(_16) +> _3 = _15 +> StorageDead(_15) + +--------------------------------# UNCHANGED------------------------------------------- +goto # UNCHANGED + +-> # UNCHANGED + +FakeRead(ForLet, _3) # UNCHANGED +StorageDead(_4) # UNCHANGED +_0 = () # UNCHANGED +StorageDead(_3) # UNCHANGED +StorageDead(_2) # UNCHANGED +StorageDead(_1) # UNCHANGED +-------------------------------------------------------------------------------------- +goto # UNCHANGED + +-> # UNCHANGED + +return # from main() # UNCHANGED + +================================================= +As before, can I skip the extra variable (_15) and insert the call to intrinsic with _3 directly?: + + +-------------------------------------------------------------------------------------- +switchInt(_4) # From above + +-> otherwise # "NOT false" # UNCHANGED + +_3 = const true # UNCHANGED? + +> # ALL NEW AND NECESSARY TO CALL instrprof_increment() +> StorageLive(_16) # pointer to instrprof_increment() function ? 
+> StorageLive(_17) +> StorageLive(_18) +> StorageLive(_19) +> _19 = const {alloc1+0: &&[u8; 6]} +> _18 = &raw const (*(*_19)) +> _17 = move _18 as *const u8 (Pointer(ArrayToPointer)) +> StorageDead(_18) +> StorageLive(_20) +> _20 = const 1234u64 +> StorageLive(_21) +> _21 = const 3u32 +> StorageLive(_22) +> _22 = const 1u32 +> -------------------------------------------------------------------------------------- +> _16 = const std::intrinsics::instrprof_increment(move _17, move _20, move _21, move _22) +> +> -> return +> +> StorageDead(_22) +> StorageDead(_21) +> StorageDead(_20) +> StorageDead(_17) +> StorageDead(_19) +> StorageDead(_16) + +--------------------------------# UNCHANGED------------------------------------------- +goto # UNCHANGED + +-> # UNCHANGED + +FakeRead(ForLet, _3) # UNCHANGED +StorageDead(_4) # UNCHANGED +_0 = () # UNCHANGED +StorageDead(_3) # UNCHANGED +StorageDead(_2) # UNCHANGED +StorageDead(_1) # UNCHANGED +-------------------------------------------------------------------------------------- +goto # UNCHANGED + +-> # UNCHANGED + +return # from main() # UNCHANGED + +*/ \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/match_with_increment_alt.rs b/src/test/codegen/coverage-experiments/src/match_with_increment_alt.rs new file mode 100644 index 0000000000000..60586967920cb --- /dev/null +++ b/src/test/codegen/coverage-experiments/src/match_with_increment_alt.rs @@ -0,0 +1,296 @@ +#![feature(core_intrinsics)] +//static TEST_FUNC_NAME: &'static [u8; 7] = b"main()\0"; + static TEST_FUNC_NAME: &'static [u8; 6] = b"main()"; +fn main() { + unsafe { core::intrinsics::instrprof_increment(TEST_FUNC_NAME as *const u8, 1234 as u64, 3 as u32, 0 as u32) }; + let a = 1; + let b = 10; + let _result = match a < b { + true => { + unsafe { core::intrinsics::instrprof_increment(TEST_FUNC_NAME as *const u8, 1234 as u64, 3 as u32, 1 as u32) }; + true + } + _ => false, + }; +} + +/* + +ALTERNATE APPROACH: + + IS IT MUCH EASIER TO INSERT COUNTERS AT THE TOP OF A REGION THAT MUST EXECUTE IN ENTIRETY IF + PANIC DOES NOT OCCUR? AND WHAT IF WE ADD SUPPORT FOR PANIC UNWIND (later)? + + IS THERE A DETRACTOR COMPARED TO THE DEFERRED APPROACH WHEN CONSIDERING EXPRESSIONS MAY HAVE EARLY RETURNS? + + (BECAUSE, WE STILL NEED TO COUNT THE REGION LEADING UP TO THE EXPRESSION ANYWAY) + +================================================= +================================================= + +To inject an intrinsic after computing a final expression value of a coverage region: + +Replace the following basic block end (last statement plus terminator): + +... ... 
+StorageLive(_4) +StorageLive(_5) +_5 = _1 +StorageLive(_6) +_6 = _2 +_4 = Lt(move _5, move _6) +StorageDead(_6) +StorageDead(_5) + <------ to insert instrprof_increment() here +FakeRead(ForMatchedPlace, _4) +-------------------------------------------------------------------------------------- +switchInt(_4) + + +================================================= +Insert call to intrinsic with: + +StorageLive(_4) # _4 is now meant for deferred FakeRead(ForMatchdPlace, _4) in BasicBlock after increment() call +StorageLive(_5) # Unchanged except _4 is now _5 +StorageLive(_6) # Unchanged except _5 is now _6 +_6 = _1 # Unchanged except _5 is now _6 +StorageLive(_7) # Unchanged except _6 is now _7 +_7 = _2 # Unchanged except _6 is now _7 +_5 = Lt(move _6, move _7) # Unchanged except _4, _5, _6 is now _5, _6, _7 +StorageDead(_7) # Unchanged except _6 is now _7 +StorageDead(_6) # Unchanged except _5 is now _6 + +FakeRead(ForLet, _5) # CHANGED ForMatchedPlace to ForLet + +> # ALL NEW AND NECESSARY TO CALL instrprof_increment() +> StorageLive(_8) # ?? stores function pointer to instrprof_increment function? +> StorageLive(_9) +> StorageLive(_10) +> StorageLive(_11) +> _11 = const {alloc1+0: &&[u8; 6]} +> _10 = &raw const (*(*_11)) +> _9 = move _10 as *const u8 (Pointer(ArrayToPointer)) +> StorageDead(_10) +> StorageLive(_12) +> _12 = const 1234u64 +> StorageLive(_13) +> _13 = const 3u32 +> StorageLive(_14) +> _14 = const 0u32 +> -------------------------------------------------------------------------------------- +> _8 = const std::intrinsics::instrprof_increment(move _9, move _12, move _13, move _14) +> +> -> return +> +> StorageDead(_14) +> StorageDead(_13) +> StorageDead(_12) +> StorageDead(_9) +> StorageDead(_11) +> StorageDead(_8) + +_4 = _5 # ARE THESE LINES REDUNDANT? CAN I JUST PASS _5 DIRECTLY TO FakeRead()? +StorageDead(_5) # DROP "_t" temp result of `let _t = a < b` + # (NOTE THAT IF SO, I CAN REMOVE _5 altogether, and use _4, which coincidentally makes less changes) + # SEE BELOW + +FakeRead(ForMatchedPlace, _4) # Unchanged +-------------------------------------------------------------------------------------- +switchInt(_4) # Unchanged + + +================================================= +Can I skip the extra variable and insert call to intrinsic with: + +StorageLive(_4) # Unchanged +StorageLive(_5) # Unchanged +_5 = _1 # Unchanged +StorageLive(_6) # Unchanged +_6 = _2 # Unchanged +_4 = Lt(move _5, move _6) # Unchanged +StorageDead(_6) # Unchanged +StorageDead(_5) # Unchanged + +> # ALL NEW AND NECESSARY TO CALL instrprof_increment() +> FakeRead(ForLet, _4) # Save the post-increment result in temp "_t" +> StorageLive(_8) # ?? stores function pointer to instrprof_increment function? +> StorageLive(_9) +> StorageLive(_10) +> StorageLive(_11) +> _11 = const {alloc1+0: &&[u8; 6]} +> _10 = &raw const (*(*_11)) +> _9 = move _10 as *const u8 (Pointer(ArrayToPointer)) +> StorageDead(_10) +> StorageLive(_12) +> _12 = const 1234u64 +> StorageLive(_13) +> _13 = const 3u32 +> StorageLive(_14) +> _14 = const 0u32 +> -------------------------------------------------------------------------------------- +> _8 = const std::intrinsics::instrprof_increment(move _9, move _12, move _13, move _14) +> +> -> return +> +> StorageDead(_14) +> StorageDead(_13) +> StorageDead(_12) +> StorageDead(_9) +> StorageDead(_11) +> StorageDead(_8) + +FakeRead(ForMatchedPlace, _4) # Unchanged (PREVIOUSLY USED IN FakeRead(ForLet), is that OK?) 
+-------------------------------------------------------------------------------------- +switchInt(_4) # Unchanged + + + + + +================================================= +================================================= + +For the second inserted call to instrprof_increment, without that call we have: + +-------------------------------------------------------------------------------------- +switchInt(_4) # From above + +-> otherwise # that is, "NOT false" + +_3 = const true + <------ to insert instrprof_increment() here +-------------------------------------------------------------------------------------- +goto + +-> # No label. No condition, and not a "return" + +FakeRead(ForLet, _3) # NOTE: Unused result +StorageDead(_4) +_0 = () +StorageDead(_3) +StorageDead(_2) +StorageDead(_1) +-------------------------------------------------------------------------------------- +goto + +-> # No label. No condition, and not a "return" + +return # from main() + + +================================================= +With the call to increment(): + +-------------------------------------------------------------------------------------- +switchInt(_4) # From above + +-> otherwise # "NOT false" # UNCHANGED + +StorageLive(_15) # CHANGED! Allocated new storage (_15) for the result of match, if true. +_15 = const true # UNCHANGED except _3 is now _15 +FakeRead(ForLet, _15) # CHANGED! Assign value to temporary (to be assigned to _3 later) ... Do I need to do this? + +> # ALL NEW AND NECESSARY TO CALL instrprof_increment() +> StorageLive(_16) # pointer to instrprof_increment() function ? +> StorageLive(_17) +> StorageLive(_18) +> StorageLive(_19) +> _19 = const {alloc1+0: &&[u8; 6]} +> _18 = &raw const (*(*_19)) +> _17 = move _18 as *const u8 (Pointer(ArrayToPointer)) +> StorageDead(_18) +> StorageLive(_20) +> _20 = const 1234u64 +> StorageLive(_21) +> _21 = const 3u32 +> StorageLive(_22) +> _22 = const 1u32 +> -------------------------------------------------------------------------------------- +> _16 = const std::intrinsics::instrprof_increment(move _17, move _20, move _21, move _22) +> +> -> return +> +> StorageDead(_22) +> StorageDead(_21) +> StorageDead(_20) +> StorageDead(_17) +> StorageDead(_19) +> StorageDead(_16) +> _3 = _15 +> StorageDead(_15) + +--------------------------------# UNCHANGED------------------------------------------- +goto # UNCHANGED + +-> # UNCHANGED + +FakeRead(ForLet, _3) # UNCHANGED +StorageDead(_4) # UNCHANGED +_0 = () # UNCHANGED +StorageDead(_3) # UNCHANGED +StorageDead(_2) # UNCHANGED +StorageDead(_1) # UNCHANGED +-------------------------------------------------------------------------------------- +goto # UNCHANGED + +-> # UNCHANGED + +return # from main() # UNCHANGED + +================================================= +As before, can I skip the extra variable (_15) and insert the call to intrinsic with _3 directly?: + + +-------------------------------------------------------------------------------------- +switchInt(_4) # From above + +-> otherwise # "NOT false" # UNCHANGED + +_3 = const true # UNCHANGED? + +> # ALL NEW AND NECESSARY TO CALL instrprof_increment() +> StorageLive(_16) # pointer to instrprof_increment() function ? 
+> StorageLive(_17) +> StorageLive(_18) +> StorageLive(_19) +> _19 = const {alloc1+0: &&[u8; 6]} +> _18 = &raw const (*(*_19)) +> _17 = move _18 as *const u8 (Pointer(ArrayToPointer)) +> StorageDead(_18) +> StorageLive(_20) +> _20 = const 1234u64 +> StorageLive(_21) +> _21 = const 3u32 +> StorageLive(_22) +> _22 = const 1u32 +> -------------------------------------------------------------------------------------- +> _16 = const std::intrinsics::instrprof_increment(move _17, move _20, move _21, move _22) +> +> -> return +> +> StorageDead(_22) +> StorageDead(_21) +> StorageDead(_20) +> StorageDead(_17) +> StorageDead(_19) +> StorageDead(_16) + +--------------------------------# UNCHANGED------------------------------------------- +goto # UNCHANGED + +-> # UNCHANGED + +FakeRead(ForLet, _3) # UNCHANGED +StorageDead(_4) # UNCHANGED +_0 = () # UNCHANGED +StorageDead(_3) # UNCHANGED +StorageDead(_2) # UNCHANGED +StorageDead(_1) # UNCHANGED +-------------------------------------------------------------------------------------- +goto # UNCHANGED + +-> # UNCHANGED + +return # from main() # UNCHANGED + +*/ \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/match_without_increment.mir b/src/test/codegen/coverage-experiments/src/match_without_increment.mir new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/src/test/codegen/coverage-experiments/src/match_without_increment.rs b/src/test/codegen/coverage-experiments/src/match_without_increment.rs new file mode 100644 index 0000000000000..fa85833e05434 --- /dev/null +++ b/src/test/codegen/coverage-experiments/src/match_without_increment.rs @@ -0,0 +1,5 @@ +fn main() { + let a = 1; + let b = 10; + let _result = match a < b { true => true, _ => false, }; +} \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/match_without_increment_alt.mir b/src/test/codegen/coverage-experiments/src/match_without_increment_alt.mir new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/src/test/codegen/coverage-experiments/src/question_mark_err_status_handling_with_comments.rs b/src/test/codegen/coverage-experiments/src/question_mark_err_status_handling_with_comments.rs new file mode 100644 index 0000000000000..03d11b2c230ca --- /dev/null +++ b/src/test/codegen/coverage-experiments/src/question_mark_err_status_handling_with_comments.rs @@ -0,0 +1,24 @@ +/* */ #[inline(always)] +/* */ pub fn __incr_cov(_region_loc: &str, /*index: u32,*/ result: T) -> T { +/* */ result +/* */ } +/* */ +/* - */ fn main() { +/* : I */ for countdown in __incr_cov("start", 10..0) { // span is just the while test expression +/* : ┃ */ let _ = countdown; +/* : ┃ */ __incr_cov("top of for", ()); +/* ┃ - */ } +/* - */ } + + +// -Z unpretty=val -- present the input source, unstable (and less-pretty) variants; +// valid types are any of the types for `--pretty`, as well as: +// `expanded`, `expanded,identified`, +// `expanded,hygiene` (with internal representations), +// `everybody_loops` (all function bodies replaced with `loop {}`), +// `hir` (the HIR), `hir,identified`, +// `hir,typed` (HIR with types for each node), +// `hir-tree` (dump the raw HIR), +// `mir` (the MIR), or `mir-cfg` (graphviz formatted MIR) + +// argument to `pretty` must be one of `normal`, `expanded`, `identified`, or `expanded,identified` diff --git a/src/test/codegen/coverage-experiments/src/while.rs b/src/test/codegen/coverage-experiments/src/while.rs new file mode 100644 index 0000000000000..3cb185eda544f --- /dev/null +++ 
b/src/test/codegen/coverage-experiments/src/while.rs @@ -0,0 +1,23 @@ +#[inline(always)] +pub fn __incr_cov(_region_loc: &str, result: T) -> T { + result +} + +fn main() { + let mut countdown = 10; + __incr_cov("block start",()); + while __incr_cov("while test", countdown > 0) { + countdown -= 1; + } + + let mut countdown = 10; + __incr_cov("after first while loop",()); + while __incr_cov("while test", countdown > 0) { + countdown -= 1; + if countdown < 5 { + __incr_cov("top of if countdown < 5",()); + break; + } + countdown -= 2; + } +} \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/while_clean.rs b/src/test/codegen/coverage-experiments/src/while_clean.rs new file mode 100644 index 0000000000000..e9ed1efc220d4 --- /dev/null +++ b/src/test/codegen/coverage-experiments/src/while_clean.rs @@ -0,0 +1,6 @@ +fn main() { + let mut countdown = 10; + while countdown > 0 { + countdown -= 1; + } +} \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/while_early_return.rs b/src/test/codegen/coverage-experiments/src/while_early_return.rs new file mode 100644 index 0000000000000..35709ffba3a04 --- /dev/null +++ b/src/test/codegen/coverage-experiments/src/while_early_return.rs @@ -0,0 +1,10 @@ +fn main() -> u8 { // this will lower to HIR but will not compile: `main` can only return types that implement `std::process::Termination` + let mut countdown = 10; + while countdown > 0 { + if false { + return if countdown > 8 { 1 } else { return 2; }; + } + countdown -= 1; + } + 0 +} \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/while_with_comments.rs b/src/test/codegen/coverage-experiments/src/while_with_comments.rs new file mode 100644 index 0000000000000..56417fedf00df --- /dev/null +++ b/src/test/codegen/coverage-experiments/src/while_with_comments.rs @@ -0,0 +1,51 @@ +/* */ #[inline(always)] +/* */ pub fn __incr_cov(_region_loc: &str, /*index: u32,*/ result: T) -> T { +/* */ result +/* */ } +/* */ +/* - */ fn main() { +/* ┃ */ let mut countdown = 10; +/* ┃ */ __incr_cov("block start",()); // Must increment before repeated while text expression +/* : I */ while __incr_cov("while test", countdown > 0) { // span is just the while test expression +/* : ┃ */ countdown -= 1; +/* : ┃ */ // __incr_cov("while loop",()); // Counter not needed, but span is computed as "while test" minus "block start" +/* : ┃ */ // If while criteria is tested 11 times, and the outer block runs only once, 11-1 = 10 +/* : ┃ */ // REMOVING COUNTER ASSUMES NO EARLY RETURN THOUGH. +/* : ┃ */ // I THINK WE CAN ONLY USE THE COUNTER EXPRESSION UP TO FIRST CONDITIONAL BLOCK, IF ANY (if, match, maybe any loop) +/* ┃ - */ } + + let mut countdown = 10; + __incr_cov("after first while loop",()); + while __incr_cov("while test", countdown > 0) { + countdown -= 1; + // if __incr_cov("top of while loop", countdown < 5) { + if countdown < 5 { // "top of while loop" = counter expression "while test" - "after first while loop" + __incr_cov("top of if countdown < 5",()); + break; + } + countdown -= 2; + // __incr_cov("after if countdown < 5 block", ()); + // "after if countdown < 5 block" = counter expression "top of while loop" - "top of if countdown < 5" + // HOWEVER, WE CAN ONLY REMOVE THE COUNTER AND USE COUNTER EXPRESSION IF WE **KNOW** THAT THE BODY OF THE IF + // WILL **ALWAYS** BREAK (OR RETURN, OR CONTINUE?) + // AND THUS WE TREAT THE STATEMENTS FOLLOWING THE IF BLOCK AS IF THEY WERE AN ELSE BLOCK. + // THAT'S A LOT TO ASK. 
+ + // PERHAPS TREAT EARLY RETURNS AS A SPECIAL KIND OF COUNTER AND IF ANY ARE INVOKED BEFORE STATEMENTS AFTER THE BLOCK THAT CONTAINS THEM, + // THEN SUBTRACT THOSE COUNTS FROM THE COUNT BEFORE THE BLOCK (AS WE DO HERE)? (SO ONE SET OF EXPRESSIONS MUST SUM ALL OF THE EARLY + // RETURNS) + } +/* - */ } + + +// -Z unpretty=val -- present the input source, unstable (and less-pretty) variants; +// valid types are any of the types for `--pretty`, as well as: +// `expanded`, `expanded,identified`, +// `expanded,hygiene` (with internal representations), +// `everybody_loops` (all function bodies replaced with `loop {}`), +// `hir` (the HIR), `hir,identified`, +// `hir,typed` (HIR with types for each node), +// `hir-tree` (dump the raw HIR), +// `mir` (the MIR), or `mir-cfg` (graphviz formatted MIR) + +// argument to `pretty` must be one of `normal`, `expanded`, `identified`, or `expanded,identified` From 088037a04414dee0b3a792a6fad193e081ee8e37 Mon Sep 17 00:00:00 2001 From: Rich Kadel Date: Thu, 4 Jun 2020 17:47:21 -0700 Subject: [PATCH 16/34] explained lang_item function body (count_code_region) --- src/libcore/intrinsics.rs | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/libcore/intrinsics.rs b/src/libcore/intrinsics.rs index abb35e838ea28..0e5af35229ca0 100644 --- a/src/libcore/intrinsics.rs +++ b/src/libcore/intrinsics.rs @@ -1943,6 +1943,12 @@ extern "rust-intrinsic" { pub fn miri_start_panic(payload: *mut u8) -> !; } +// Since `count_code_region` is lang_item, it must have a function body that the compiler can use +// to register its DefId with the lang_item entry. This function body is never actually called +// (and is therefore implemented as an aborting stub) because it is replaced with the +// LLVM intrinsic `llvm.instrprof.increment` by +// `rustc_codegen_llvm::intrinsic::IntrinsicCallMethods::codegen_intrinsic_call()`. 
+#[doc(hidden)] #[cfg(not(bootstrap))] #[cfg_attr(not(bootstrap), lang = "count_code_region")] pub fn count_code_region(_index: u32) { From 2c5c2a6bc2f7023ee8ad252d2ee5a45fbfb2de22 Mon Sep 17 00:00:00 2001 From: Rich Kadel Date: Thu, 4 Jun 2020 17:52:27 -0700 Subject: [PATCH 17/34] removed experiments for cleaner github PR --- src/libcore/intrinsics.rs | 17 +- .../codegen/coverage-experiments/Cargo.lock | 5 - .../codegen/coverage-experiments/Cargo.toml | 103 ----- .../README-THIS-IS-TEMPORARY.md | 157 -------- .../src/coverage_injection_test.rs | 335 ---------------- .../src/coverage_injection_test2.rs | 320 ---------------- .../src/coverage_injection_test_alt.rs | 362 ------------------ .../coverage-experiments/src/drop_trait.rs | 25 -- .../src/drop_trait_with_comments_prints.rs | 53 --- .../codegen/coverage-experiments/src/for.rs | 41 -- .../src/for_with_comments.rs | 24 -- .../codegen/coverage-experiments/src/if.rs | 80 ---- .../src/if_with_comments.rs | 39 -- .../src/increment_intrinsic.rs | 11 - .../coverage-experiments/src/just_main.rs | 3 - .../coverage-experiments/src/lazy_boolean.rs | 17 - .../src/loop_break_value.rs | 15 - .../codegen/coverage-experiments/src/match.rs | 22 -- .../src/match_with_increment.rs | 305 --------------- .../src/match_with_increment_alt.rs | 296 -------------- .../src/match_without_increment.mir | 0 .../src/match_without_increment.rs | 5 - .../src/match_without_increment_alt.mir | 0 ..._mark_err_status_handling_with_comments.rs | 24 -- .../codegen/coverage-experiments/src/while.rs | 23 -- .../coverage-experiments/src/while_clean.rs | 6 - .../src/while_early_return.rs | 10 - .../src/while_with_comments.rs | 51 --- 28 files changed, 9 insertions(+), 2340 deletions(-) delete mode 100644 src/test/codegen/coverage-experiments/Cargo.lock delete mode 100644 src/test/codegen/coverage-experiments/Cargo.toml delete mode 100644 src/test/codegen/coverage-experiments/README-THIS-IS-TEMPORARY.md delete mode 100644 src/test/codegen/coverage-experiments/src/coverage_injection_test.rs delete mode 100644 src/test/codegen/coverage-experiments/src/coverage_injection_test2.rs delete mode 100644 src/test/codegen/coverage-experiments/src/coverage_injection_test_alt.rs delete mode 100644 src/test/codegen/coverage-experiments/src/drop_trait.rs delete mode 100644 src/test/codegen/coverage-experiments/src/drop_trait_with_comments_prints.rs delete mode 100644 src/test/codegen/coverage-experiments/src/for.rs delete mode 100644 src/test/codegen/coverage-experiments/src/for_with_comments.rs delete mode 100644 src/test/codegen/coverage-experiments/src/if.rs delete mode 100644 src/test/codegen/coverage-experiments/src/if_with_comments.rs delete mode 100644 src/test/codegen/coverage-experiments/src/increment_intrinsic.rs delete mode 100644 src/test/codegen/coverage-experiments/src/just_main.rs delete mode 100644 src/test/codegen/coverage-experiments/src/lazy_boolean.rs delete mode 100644 src/test/codegen/coverage-experiments/src/loop_break_value.rs delete mode 100644 src/test/codegen/coverage-experiments/src/match.rs delete mode 100644 src/test/codegen/coverage-experiments/src/match_with_increment.rs delete mode 100644 src/test/codegen/coverage-experiments/src/match_with_increment_alt.rs delete mode 100644 src/test/codegen/coverage-experiments/src/match_without_increment.mir delete mode 100644 src/test/codegen/coverage-experiments/src/match_without_increment.rs delete mode 100644 src/test/codegen/coverage-experiments/src/match_without_increment_alt.mir delete mode 100644 
src/test/codegen/coverage-experiments/src/question_mark_err_status_handling_with_comments.rs delete mode 100644 src/test/codegen/coverage-experiments/src/while.rs delete mode 100644 src/test/codegen/coverage-experiments/src/while_clean.rs delete mode 100644 src/test/codegen/coverage-experiments/src/while_early_return.rs delete mode 100644 src/test/codegen/coverage-experiments/src/while_with_comments.rs diff --git a/src/libcore/intrinsics.rs b/src/libcore/intrinsics.rs index 0e5af35229ca0..06a432a26961e 100644 --- a/src/libcore/intrinsics.rs +++ b/src/libcore/intrinsics.rs @@ -1943,16 +1943,17 @@ extern "rust-intrinsic" { pub fn miri_start_panic(payload: *mut u8) -> !; } -// Since `count_code_region` is lang_item, it must have a function body that the compiler can use -// to register its DefId with the lang_item entry. This function body is never actually called -// (and is therefore implemented as an aborting stub) because it is replaced with the -// LLVM intrinsic `llvm.instrprof.increment` by -// `rustc_codegen_llvm::intrinsic::IntrinsicCallMethods::codegen_intrinsic_call()`. -#[doc(hidden)] +/// Defines the `count_code_region` intrinsic as a `LangItem`. `LangItem`s require a function body +/// to register its DefId with the LangItem entry. The function body is never actually called (and +/// is therefore implemented as an aborting stub) because it is replaced with the LLVM intrinsic +/// `llvm.instrprof.increment` by +/// `rustc_codegen_llvm::intrinsic::IntrinsicCallMethods::codegen_intrinsic_call()`. #[cfg(not(bootstrap))] #[cfg_attr(not(bootstrap), lang = "count_code_region")] -pub fn count_code_region(_index: u32) { - #[cfg_attr(not(bootstrap), allow(unused_unsafe))] // remove `unsafe` on bootstrap bump +fn count_code_region(_index: u32) { + // remove `unsafe` (and safety comment) on bootstrap bump + #[cfg_attr(not(bootstrap), allow(unused_unsafe))] + // SAFETY: the `abort` intrinsic has no requirements to be called. unsafe { abort() } diff --git a/src/test/codegen/coverage-experiments/Cargo.lock b/src/test/codegen/coverage-experiments/Cargo.lock deleted file mode 100644 index 132469cbb182c..0000000000000 --- a/src/test/codegen/coverage-experiments/Cargo.lock +++ /dev/null @@ -1,5 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. 
-[[package]] -name = "coverage_experiments" -version = "0.1.0" diff --git a/src/test/codegen/coverage-experiments/Cargo.toml b/src/test/codegen/coverage-experiments/Cargo.toml deleted file mode 100644 index 296a8d5c9af2d..0000000000000 --- a/src/test/codegen/coverage-experiments/Cargo.toml +++ /dev/null @@ -1,103 +0,0 @@ -[workspace] - -[package] -name = "coverage_experiments" -version = "0.1.0" -license = "BSD-3-Clause" -authors = ["rust-fuchsia@fuchsia.com"] -edition = "2018" - -[[bin]] - -name = "coverage_injection_test" -path = "src/coverage_injection_test.rs" - -[[bin]] - -name = "coverage_injection_test2" -path = "src/coverage_injection_test2.rs" - -[[bin]] - -name = "while" -path = "src/while.rs" - -[[bin]] - -name = "while_clean" -path = "src/while_clean.rs" - -[[bin]] - -name = "while_early_return" -path = "src/while_early_return.rs" - -[[bin]] - -name = "if_with_comments" -path = "src/if_with_comments.rs" - -[[bin]] - -name = "if" -path = "src/if.rs" - -[[bin]] - -name = "increment_intrinsic" -path = "src/increment_intrinsic.rs" - -[[bin]] - -name = "just_main" -path = "src/just_main.rs" - -[[bin]] - -name = "lazy_boolean" -path = "src/lazy_boolean.rs" - -[[bin]] - -name = "match" -path = "src/match.rs" - -[[bin]] - -name = "match_without_increment" -path = "src/match_without_increment.rs" # identical to -Zunpretty=hir output - -[[bin]] - -name = "match_with_increment" -path = "src/match_with_increment.rs" - -[[bin]] - -name = "match_with_increment_alt" -path = "src/match_with_increment_alt.rs" - -[[bin]] - -name = "loop_break_value" -path = "src/loop_break_value.rs" - -[[bin]] - -name = "for_with_comments" -path = "src/for_with_comments.rs" - -[[bin]] - -name = "for" -path = "src/for.rs" - -[[bin]] - -name = "drop_trait" -path = "src/drop_trait.rs" - -#[dependencies] # Should not need to manually add coverage dependencies -#version = "0.1.0" -#path = "../__builtin" # for mod __builtin::coverage - diff --git a/src/test/codegen/coverage-experiments/README-THIS-IS-TEMPORARY.md b/src/test/codegen/coverage-experiments/README-THIS-IS-TEMPORARY.md deleted file mode 100644 index 3b69c0a406594..0000000000000 --- a/src/test/codegen/coverage-experiments/README-THIS-IS-TEMPORARY.md +++ /dev/null @@ -1,157 +0,0 @@ -# codegen/coverage-experiments -*
-
-THIS DIRECTORY IS TEMPORARY
-
* - -This directory contains some work-in-progress (WIP) code used for experimental development and -testing of Rust Coverage feature development. - -The code in this directory will be removed, or migrated into product tests, when the Rust -Coverage feature is complete. - -[TOC] - -## Development Notes - -### config.toml - -config.toml probably requires (I should verify that intrinsic `llvm.instrprof.increment` -code generation ONLY works with this config option): - - profiler = true - -## First build - -```shell -./x.py clean -./x.py build -i --stage 1 src/libstd -``` - -## Incremental builds *IF POSSIBLE!* - -```shell -./x.py build -i --stage 1 src/libstd --keep-stage 1 -``` - -*Note: Some changes made for Rust Coverage required the full build (without `--keep-stage 1`), and in some cases, required `./x.py clean` first!. Occassionally I would get errors when building or when compiling a test program with `--Zinstrument-coverage` that work correctly only after a full clean and build.* - -## Compile a test program with LLVM coverage instrumentation - -*Note: This PR is still a work in progress. At the time of this writing, the `llvm.instrprof.increment` intrinsic is injected, and recognized by the LLVM code generation stage, but it does not appear to be included in the final binary. This is not surprising since other steps are still to be implemented, such as generating the coverage map. See the suggested additional `llvm` flags for ways to verify the `llvm` passes at least get the right intrinsic.* - -Suggested debug configuration to confirm Rust coverage features: -```shell -$ export RUSTC_LOG=rustc_codegen_llvm::intrinsic,rustc_mir::transform::instrument_coverage=debug -``` - -Ensure the new compiled `rustc` is used (the path below, relative to the `rust` code repository root, is an example only): - -```shell -$ build/x86_64-unknown-linux-gnu/stage1/bin/rustc \ - src/test/codegen/coverage-experiments/just_main.rs \ - -Zinstrument-coverage -``` - -### About the test programs in coverage-experiments/src/ - -The `coverage-experiments/src/` directory contains some sample (and very simple) Rust programs used to analyze Rust compiler output at various stages, with or without the Rust code coverage compiler option. For now, these are only used for the in-progress development and will be removed at a future date. (These are *not* formal test programs.) - -The src director may also contain some snapshots of mir output from experimentation, particularly if the saved snapshots highlight results that are important to the future development, individually or when compared with other output files. - -Be aware that some of the files and/or comments may be outdated. - -### Additional `llvm` flags (append to the `rustc` command) - -These optional flags generate additional files and/or terminal output. LLVM's `-print-before=all` should show the `instrprof.increment` intrinsic with arguments computed by the experimental Rust coverage feature code: - -```shell - --emit llvm-ir \ - -Zverify-llvm-ir \ - -Zprint-llvm-passes \ - -Csave-temps \ - -Cllvm-args=-print-before-all -``` - -### Additional flags for MIR analysis and transforms - -These optional flags generate a directory with many files representing the MIR as text (`.mir` files) and as a visual graph (`.dot` files) rendered by `graphviz`. 
(**Some IDEs, such as `VSCode` have `graphviz` extensions.**) - -```shell - -Zdump-mir=main \ - -Zdump-mir-graphviz -``` - -### Flags I've used but appear to be irrelvant to `-Zinstrument-coverage` after all: -```shell - # -Zprofile - # -Ccodegen-units=1 - # -Cinline-threshold=0 - # -Clink-dead-code - # -Coverflow-checks=off -``` - -## Run the test program compiled with code coverage instrumentation (maybe): - -As stated above, at the time of this writing, this work-in-progress seems to generate `llvm.instrprof.increment` intrinsic calls correctly, and are visibile in early `llvm` code generation passes, but are eventually stripped. - -The test program should run as expected, currently does not generate any coverage output. - -*Example:* - -```shell - $ src/test/codegen/coverage-experiments/just_main - hello world! (should be covered) -``` - -### Running the coverage-enabled `rustc` compiler in the `lldb` debugger: - -For example, to verify the intrinsic is codegen'ed, set breakpoint in `lldb` where it validates a certain instruction is the `llvm.instrprof.increment` instruction. - -First, update config.toml for debugging: - -```toml - [llvm] - optimize = false - release-debuginfo = true - - [rust] - debug = true - debuginfo-level = 2 -``` - -*(Note, in case this is relevant after all, I also have the following changes; but I don't think I need them:)* - -```toml - # Add and uncomment these if relevant/useful: - # codegen-units = 0 - # python = '/usr/bin/python3.6' -``` - -Run the compiler with additional flags as needed: - -```shell -lldb \ - build/x86_64-unknown-linux-gnu/stage1/bin/rustc \ - -- \ - src/test/codegen/coverage-experiments/just_main.rs \ - -Zinstrument-coverage \ - -Zdump-mir=main \ - -Zdump-mir-graphviz -``` - -Note the specific line numbers may be different: - -```c++ -(lldb) b lib/Transforms/Instrumentation/InstrProfiling.cpp:418 -(lldb) r - -Process 93855 stopped -* thread #6, name = 'rustc', stop reason = breakpoint 2.1 - frame #0: 0x00007fffedff7738 librustc_driver-5a0990d8d18fb2b4.so`llvm::InstrProfiling::lowerIntrinsics(this=0x00007fffcc001d40, F=0x00007fffe4552198) at InstrProfiling.cpp:418:23 - 415 auto Instr = I++; - 416 InstrProfIncrementInst *Inc = castToIncrementInst(&*Instr); - 417 if (Inc) { --> 418 lowerIncrement(Inc); - 419 MadeChange = true; - 420 } else if (auto *Ind = dyn_cast(Instr)) { - 421 lowerValueProfileInst(Ind); -(lldb) -``` \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/coverage_injection_test.rs b/src/test/codegen/coverage-experiments/src/coverage_injection_test.rs deleted file mode 100644 index 231da1dc1a67f..0000000000000 --- a/src/test/codegen/coverage-experiments/src/coverage_injection_test.rs +++ /dev/null @@ -1,335 +0,0 @@ -/* */ use std::io::Error; -/* */ use std::io::ErrorKind; -/* */ -/* */ /// Align Rust counter increment with with: -/* */ /// [‘llvm.instrprof.increment’ Intrinsic](https://llvm.org/docs/LangRef.html#llvm-instrprof-increment-intrinsic) -/* */ /// -/* */ /// declare void @llvm.instrprof.increment(i8* , i64 , i32 , i32 ) -/* */ /// -/* */ /// The first argument is a pointer to a global variable containing the name of the entity -/* */ /// being instrumented. This should generally be the (mangled) function name for a set of -/* */ /// counters. -/* */ /// -/* */ /// The second argument is a hash value that can be used by the consumer of the profile data -/* */ /// to detect changes to the instrumented source, and the third is the number of counters -/* */ /// associated with name. 
It is an error if hash or num-counters differ between two -/* */ /// instances of instrprof.increment that refer to the same name. -/* */ /// -/* */ /// The last argument refers to which of the counters for name should be incremented. It -/* */ /// should be a value between 0 and num-counters. -/* */ /// -/* */ /// # Arguments -/* */ /// -/* */ /// `mangled_fn_name` - &'static ref to computed and injected static str, using: -/* */ /// -/* */ /// ``` -/* */ /// fn rustc_symbol_mangling::compute_symbol_name( -/* */ /// tcx: TyCtxt<'tcx>, -/* */ /// instance: Instance<'tcx>, -/* */ /// compute_instantiating_crate: impl FnOnce() -> CrateNum, -/* */ /// ) -> String -/* */ /// ``` -/* */ /// -/* */ /// `source_version_hash` - Compute hash based that only changes if there are "significant" -/* */ /// to control-flow inside the function. -/* */ /// -/* */ /// `num_counters` - The total number of counter calls [MAX(counter_index) + 1] within the -/* */ /// function. -/* */ /// -/* */ /// `counter_index` - zero-based counter index scoped by the function. (Ordering of -/* */ /// counters, relative to the source code location, is apparently not expected.) -/* */ /// -/* */ /// # Notes -/* */ /// -/* */ /// * The mangled_fn_name may not be computable until generics are monomorphized (see -/* */ /// parameters required by rustc_symbol_mangling::compute_symbol_name). -/* */ /// * The version hash may be computable from AST analysis, and may not benefit from further -/* */ /// lowering. -/* */ /// * num_counters depends on having already identified all counter insertion locations. -/* */ /// * counter_index can be computed at time of counter insertion (incrementally). -/* */ /// * Numeric parameters are signed to match the llvm increment intrinsic parameter types. -/* */ fn __lower_incr_cov(_mangled_fn_name: &'static str, _fn_version_hash: i64, _num_counters: i32, _counter_index: i32) { -/* */ } -/* */ -/* */ /// A coverage counter implementation that will work as both an intermediate coverage -/* */ /// counting and reporting implementation at the AST-level only--for debugging and -/* */ /// development--but also serves as a "marker" to be replaced by calls to LLVM -/* */ /// intrinsic coverage counter APIs during the lowering process. -/* */ /// -/* */ /// Calls to this function will be injected automatically into the AST. When LLVM intrinsics -/* */ /// are enabled, the counter function calls that were injected into the AST serve as -/* */ /// placeholders, to be replaced by an alternative, such as: -/* */ /// -/* */ /// * direct invocation of the `llvm.instrprof.increment()` intrinsic; or -/* */ /// * the `__lower_incr_cov()` function, defined above, that would invoke the -/* */ /// `llvm.instrprof.increment()` intrinsic; or -/* */ /// * a similar expression wrapper, with the additional parameters (as defined above -/* */ /// for `__lower_incr_cov()`, that invokes `llvm.instrprof.increment()` and returns the -/* */ /// result of the wrapped expression) -/* */ /// -/* */ /// The first two options would require replacing the inlined wrapper call with something -/* */ /// like: -/* */ /// -/* */ /// ``` -/* */ /// { let result = {expr}; __inlined_incr_cov(context, counter); result } -/* */ /// ``` -/* */ /// -/* */ /// But if the lowering process is already unwrapping the inlined call to `__incr_cov()`, then -/* */ /// it may be a perfect opportunity to replace the function with one of these more -/* */ /// direct methods. 
-/* */ /// -/* */ #[inline(always)] -/* */ pub fn __incr_cov(region_loc: &str, /*index: u32,*/ result: T) -> T { -/* */ // Either call the intermediate non-llvm coverage counter API or -/* */ // replace the call to this function with the expanded `__lower_incr_cov()` call. -/* */ -/* */ // let _lock = increment_counter(counter); -/* */ println!("{}", region_loc); -/* */ -/* */ result -/* */ } -/* */ -/* */ /// Write a report identifying each incremented counter and the number of times each counter -/* */ /// was incremented. -/* */ fn __report() { -/* */ println!("WRITE REPORT!"); -/* */ } -/* */ -/* */ /// Increment the counter after evaluating the wrapped expression (see `__incr_cov()`), then -/* */ /// write a report identifying each incremented counter and the number of times each counter -/* */ /// was incremented. -/* */ #[inline(always)] -/* */ pub fn __incr_cov_and_report(region_loc: &str, /*counter: u32,*/ result: T) -> T { -/* */ __incr_cov(region_loc, /*counter,*/ ()); -/* */ __report(); -/* */ result -/* */ } -/* */ -/* */ macro_rules! from { -/* */ ($from:expr) => { &format!("from: {}\n to: {}:{}:{}", $from, file!(), line!(), column!()) }; -/* */ } -/* */ -/* */ #[derive(Debug)] -/* */ enum TestEnum { -/* */ Red, -/* */ Green, -/* */ Blue, -/* */ } -/* */ -/* */ struct TestStruct { -/* */ field: i32, -/* */ } -/* */ -/* */ // IMPORTANT! IS WRAPPING main() ENOUGH? OR DO I ALSO NEED TO WRAP THREAD FUNCTIONS, ASSUMING -/* */ // THEY ARE STILL RUNNING WITH MAIN EXITS? (IF THEY CAN). NOT SURE HOW RUST HANDLES THAT. -/* */ -/* */ // I SUSPECT USING THREAD_LOCAL COUNTERS MAY NOT ACTUALLY BE AN OPTIMIZATION OVER MUTEX LOCKS, -/* */ // BUT MAYBE I SHOULD ASK. -/* */ -/* */ impl TestStruct { -/* - */ fn new() -> Self { -/* ┃ */ __incr_cov(from!("fn new()"),Self::new_with_value(31415)) // function-scoped counter index = 0 -/* - */ } -/* */ -/* - */ fn new_with_value(field: i32) -> Self { -/* ┃ */ __incr_cov(from!("fn new_with_value()"),Self { -/* ┃ */ field, -/* ┃ */ }) // function-scoped counter index = 0 -/* - */ } -/* */ -/* */ fn call_closure(&self, closure: F) -> bool -/* */ where -/* */ F: FnOnce( -/* */ i32, -/* */ ) -> bool, -/* - */ { -/* ┃ */ __incr_cov(from!("fn call_closure()"),closure(123)) // function-scoped counter index = 0 -/* - */ } -/* */ -/* - */ fn various(&self) -> Result<(),Error> { -/* ┃ */ use TestEnum::*; -/* ┃ */ let mut color = Red; -/* ┃ */ let _ = color; -/* ┃ */ color = Blue; -/* ┃ */ let _ = color; -/* ┃ */ color = Green; -/* ┃ */ match __incr_cov(from!("fn various"),color) { // function-scoped counter index = 0 -/* : */ -/* : */ // !!! RECORD SPAN FROM START OF INNERMOST CONTAINING BLOCK (THE FUNCTION IN THIS CASE) TO END OF MATCH EXPRESSION -/* : */ // If `match`, `while`, `loop`, `for`, `if`, etc. expression has a `return`, `break`, or `continue` -/* : */ // (if legal), then RECORD SPAN FROM START OF INNERMOST CONTAINING BLOCK TO END OF `return` EXPRESSION -/* : */ // If the expression includes lazy booleans, nest calls to `__incr_cov()`. -/* : I */ Red => __incr_cov(from!("Red => or end of MatchArmGuard expression inside pattern, if any"),println!("roses")), -/* : - */ Green => { -/* : ┃ */ let spidey = 100; -/* : ┃ */ let goblin = 50; -/* : ┃ */ // if spidey > goblin {__incr_cov(from!(""),{ -/* : ┃ */ // println!("what ev"); -/* : ┃ */ // })} -/* : ┃ */ // ACTUALLY, WRAPPING THE ENTIRE IF BLOCK IN `__incr_cov` IS NOT A GREAT GENERAL RULE. 
-/* : ┃ */ // JUST INSERTING A `return`, `break`, or `continue` IN THAT BLOCK (without an intermediate condition) -/* : ┃ */ // MAKES THE `__incr_cov()` CALL UNREACHABLE! -/* : ┃ */ // MY ORIGINAL SOLUTION WORKS BETTER (WRAP LAST EXPRESSION OR AFTER LAST SEMICOLON STATEMENT IN BLOCK) -/* : ┃ */ // UNLESS THE EXPRESSION IS NOT A BLOCK. -/* : ┃ - */ if __incr_cov(from!("Green => or end of MatchArmGuard expression inside pattern, if any"),spidey > goblin) { -/* : : ┃ */ println!("spidey beats goblin"); -/* : : ┃ */ __incr_cov(from!("block start"),()); -/* : ┃ - */ } else if __incr_cov(from!("`else if` on this line"),spidey == goblin) { -/* : : ┃ */ // COVERAGE NOTE: Do we mark only the expression span (that may be trivial, as in this case), -/* : : ┃ */ // or associate it with the outer block, similar to how the `if` expression is associated with -/* : : ┃ */ // the outer block? (Although it is a continuation, in a sense, it is discontiguous in this case, -/* : : ┃ */ // so I think simpler to just make it its own coverage region.) -/* : : ┃ */ println!("it's a draw"); -/* : : ┃ */ __incr_cov(from!("block start"),()); -/* : ┃ - - - */ } else if if __incr_cov(from!("`else if` on this line"),true) { -/* : : : ┃ */ // return __incr_cov(from!("after `if true`"),Ok(())); -/* : : : ┃ */ // ACTUALLY, BECAUSE OF `return`, WE DO NOT RECORD THE `if true` EVEN THOUGH WE COVERED IT. -/* : : : ┃ */ // IN FACT, IF THIS NESTED CONDITIONAL IN A CONDITIONAL EXPRESSION WAS AN `if` (WITHOUT PRECEDING ELSE) -/* : : : ┃ */ // WE WOULD NOT HAVE RECORDED THE COVERAGE OF STATEMENTS LEADING UP TO THE `if`, SO -/* : : : ┃ */ // IT SHOULD BE: -/* ┏-:---:-------:---< */ return __incr_cov(from!(""),Ok(())); -/* V : : : : */ // NOTE THE `from` STRING IS SAME FOR THE `else if`s `__incr_cov` AND THIS `return`. -/* : : : : */ // ONLY ONE OF THESE WILL EXECUTE, TO RECORD COVERAGE FROM THAT SPOT. -/* : : ┃ - */ } else { -/* : : : I */ __incr_cov(from!("`else`"),false) -/* : : - - */ } { -/* : : ┃ */ println!("wierd science"); -/* : : ┃ */ __incr_cov(from!("block start"),()); -/* : ┃ - */ } else { -/* : : ┃ */ println!("goblin wins"); -/* ┏-:---:---< */ return __incr_cov(from!("`else`"),Ok(())); // THIS COUNTS LAST STATEMENT IN `else` BLOCK -/* V : : : */ // COVERAGE NOTE: When counting the span for `return`, -/* : : : */ // `break`, or `continue`, also report the outer spans -/* : : : */ // got this far--including this `else` block. Record -/* : : : */ // The start positions for those outer blocks, but: -/* : : : */ // * For the block containing the `return`, `break`, or -/* : : : */ // `continue`, end report the end position is the -/* : : : */ // start of the `return` span (or 1 char before it). -/* : : : */ // * Anything else? 
-/* : ┃ - */ } -/* : ┃ - */ // __incr_cov(from!(""),()); // DO NOT COUNT HERE IF NO STATEMENTS AFTER LAST `if` or `match` -/* : - */ }, -/* : I */ Blue => __incr_cov(from!("Blue => or end of MatchArmGuard expression inside pattern, if any"),println!("violets")), -/* ┃ */ } -/* ┃ */ -/* ┃ */ let condition1 = true; -/* ┃ */ let condition2 = false; -/* ┃ */ let condition3 = true; -/* ┃ */ -/* ┃ */ println!("Called `various()` for TestStruct with field={}", self.field); -/* ┃ */ -/* ┃ - */ if __incr_cov(from!("after block end of prior `match` (or `if-else if-else`)"),condition1) { -/* : ┃ */ println!("before while loop"); -/* : ┃ */ let mut countdown = 10; -/* : ┃ */ __incr_cov(from!("block start"),()); // Must increment before repeated while text expression -/* : : I */ while __incr_cov(from!("while test"), countdown > 0) { // span is just the while test expression -/* : : ┃ */ println!("top of `while` loop"); -/* : : ┃ */ countdown -= 1; -/* : : ┃ */ // __incr_cov(from!("while loop"),()); // Counter not needed, but span is computed as "while test" minus "block start" -/* : : ┃ */ // If test expression is 11, and the outer block runs only once, 11-1 = 10 -/* : ┃ - */ } -/* : ┃ */ println!("before for loop"); -/* : ┃ - */ for index in __incr_cov(from!("end of while"),0..10) { -/* : : ┃ */ println!("top of `for` loop"); -/* : : ┃ - */ if __incr_cov(from!("block start"),index == 8) { -/* : : : ┃ */ println!("before break"); -/* : : : ┃ */ // note the following is not legal here: -/* : : : ┃ */ // "can only break with a value inside `loop` or breakable block" -/* : : : ┃ */ // break __incr_cov(from!(""),()); -/* : : : ┃ */ __incr_cov(from!("block start"),()); -/* : : ┏-----< */ break; -/* : : V : : */ -/* : : : : */ // FIXME(richkadel): add examples with loop labels, breaking out of inner and outer loop to outer loop label, with expression. -/* : : : : */ // May want to record both the span and the start position after the broken out block depdnding on label -/* : : ┃ - */ } -/* : : ┃ */ println!("after `break` test"); -/* : : ┃ - */ if __incr_cov(from!("block end of `if index == 8`"),condition2) { -/* ┏-:---:---:---< */ return __incr_cov(from!("block start"),Ok(())); -/* V : : ┃ - */ } -/* : : ┃ */ -/* : : ┃ */ // BECAUSE THE PREVIOUS COVERAGE REGION HAS A `return`, THEN -/* : : ┃ */ // IF PREVIOUS COVERAGE REGION IS NOT COUNTED THEN OUTER REGION REACHED HERE. -/* : : ┃ */ // ADD A COVERAGE REGION FOR THE SPAN FROM JUST AFTER PREVIOUS REGION TO END -/* : : ┃ */ // OF OUTER SPAN, THEN TRUNCATE TO NEXT REGION NOT REACHED. -/* : : ┃ - */ if index % 3 == 2 { // NO __incr_cov() HERE BECAUSE NO STATEMENTS BETWEEN LAST CONDITIONAL BLOCK AND START OF THIS ONE -/* : : Λ : ┃ */ __incr_cov(from!("block end of `if condition2`"),()); -/* : : ┗-----< */ continue; -/* : : ┃ - */ } -/* : : ┃ */ println!("after `continue` test"); -/* : : ┃ */ // maybe add a runtime flag for a possible `return` here? -/* : : ┃ */ __incr_cov(from!("for loop"),()); -/* : ┃ - */ } -/* : ┃ */ println!("after for loop"); -/* : ┃ */ let result = if { // START OF NEW CONDITIONAL EXPRESSION. NEXT "GUARANTEED" COUNTER SHOULD COUNT FROM END OF LAST CONDITIONAL EXPRESSION -/* : ┃ */ // A "GUARANTEED" COUNTER CALL IS ONE THAT WILL BE CALLED REGARDLESS OF OTHER CONDITIONS. 
THIS INCLUDES: -/* : ┃ */ // * A CONDITIONAL EXPRESSION THAT IS NOT A BLOCK (OR ANOTHER CONDITIONAL STATEMENT, WHICH WOULD CONTAIN A BLOCK) -/* : ┃ */ // * OR IF THE NEXT CONDITIONAL EXPRESSION IS A BLOCK OR CONDITIONAL STATEMENT, THEN THE FIRST "GUARANTEED" COUNTER IN THAT BLOCK -/* : ┃ */ // * END OF BLOCK IF THE BLOCK DOES NOT HAVE INNER CONDITIONAL EXPRESSIONS -/* : ┃ */ // * BRANCHING STATEMENTS (`return`, `break`, `continue`) BY EITHER WRAPPING THE BRANCH STATEMENT NON-BLOCK EXPRESSION, -/* : ┃ */ // OR PREPENDING A COUNTER WITH EMPTY TUPLE IF NO EXPRESSION, OR IF EXPRESSION IS A BLOCK, THEN THE NEXT "GUARANTEED" -/* : ┃ */ // COUNTER CALL WITHIN THAT BLOCK. -/* : ┃ */ // BASICALLY, CARRY THE START OF COVERAGE SPAN FORWARD UNTIL THE GUARANTEED COUNTER IS FOUND -/* : ┃ */ println!("after result = if ..."); -/* : ┃ - */ if __incr_cov(from!("block end of `for` loop"),condition2) { -/* : : ┃ */ println!("before first return"); -/* ┏-:---:-------< */ return __incr_cov(from!("block start"),Ok(())); -/* V : : - */ } else if __incr_cov(from!("`else`"),condition3) { -/* : : ┃ */ // THE ABOVE COUNTER IS _NOT_ REALLY NECESSARY IF EXPRESSION IS GUARANTEED TO EXECUTE. -/* : : ┃ */ // IF WE GET COUNTER IN `else if` BLOCK WE COVERED EXPRESSION. -/* : : ┃ */ // IF WE GET TO ANY REMAINING `else` or `else if` BLOCK WE KNOW WE EVALUATED THIS CONDITION -/* : : ┃ */ // AND ALL OTHERS UP TO THE EXECUTED BLOCK. BUT THE SPAN WOULD HAVE "HOLES" FOR UNEXECUTED BLOCKS. -/* : : ┃ */ println!("not second return"); -/* ┏-:---:-------< */ return __incr_cov(from!("block start"),Ok(())); -/* V : : - */ } else { -/* : : ┃ */ println!("not returning"); -/* : : ┃ */ __incr_cov(from!("block start"),false) -/* : : - */ } -/* : ┃ */ // NO COUNTER HERE BECAUSE NO STATEMENTS AFTER CONDITIONAL BLOCK -/* : ┃ - */ } { -/* : : ┃ */ println!("branched condition returned true"); -/* : : ┃ */ __incr_cov(from!(""),Ok(())) -/* : ┃ - */ } else if self.call_closure( -/* : : - */ |closure_param| __incr_cov(from!(""), -/* : : ┃ - */ if condition3 { -/* : : : ┃ */ println!("in closure, captured condition said to print the param {}", closure_param); -/* : : : ┃ */ __incr_cov(from!(""),false) -/* : : ┃ - */ } else { -/* : : : ┃ */ println!("in closure, captured condition was false"); -/* : : : ┃ */ __incr_cov(from!(""),true) -/* : : ┃ - */ } -/* : : - */ ) -/* : : - */ ) { -/* : : ┃ */ println!("closure returned true"); -/* : : ┃ */ __incr_cov(from!(""),Err(Error::new(ErrorKind::Other, "Result is error if closure returned true"))) -/* : ┃ - */ } else { -/* : : ┃ */ println!("closure returned false"); -/* : : ┃ */ __incr_cov(from!(""),Err(Error::new(ErrorKind::Other, "Result is error if closure returned false"))) -/* : ┃ - */ }; -/* : ┃ */ println!("bottom of function might be skipped if early `return`"); -/* : ┃ */ __incr_cov(from!("if condition1"),result) -/* ┃ - */ } else { -/* : ┃ */ println!("skipping everything in `various()`"); -/* : ┃ */ __incr_cov(from!(""),Ok(())) -/* ┃ - */ } -/* ┃ - */ // __incr_cov(from!(""),0) // DO NOT COUNT IF NO STATEMENTS AFTER CONDITIONAL BLOCK. 
ALL COVERAGE IS ALREADY COUNTED -/* - */ } -/* */ } -/* */ -/* - */ fn main() -> Result<(), std::io::Error> { -/* ┃ */ //let mut status: u8 = 2; -/* ┃ */ let mut status: u8 = 1; -/* : - */ let result = if status < 2 && -/* : ┃ */ __incr_cov(from!(""),{ -/* : ┃ */ status -= 1; -/* : ┃ */ status == 0 -/* : - - */ }) { -/* : ┃ */ let test_struct = TestStruct::new_with_value(100); -/* : ┃ */ let _ = test_struct.various(); -/* ┏-:---< */ return __incr_cov_and_report(from!(""),Err(Error::new(ErrorKind::Other, format!("Error status {}", status)))) -/* V : - */ } else { -/* : ┃ */ let test_struct = TestStruct::new(); -/* : ┃ */ __incr_cov(from!(""),test_struct.various()) -/* : - */ }; -/* ┃ */ println!("done"); -/* ┃ */ __incr_cov_and_report(from!(""),result) // function-scoped counter index = 0 -/* - */ } \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/coverage_injection_test2.rs b/src/test/codegen/coverage-experiments/src/coverage_injection_test2.rs deleted file mode 100644 index 8f4399ab51d09..0000000000000 --- a/src/test/codegen/coverage-experiments/src/coverage_injection_test2.rs +++ /dev/null @@ -1,320 +0,0 @@ -/* */ use std::io::Error; -/* */ use std::io::ErrorKind; -/* */ -/* */ /// Align Rust counter increment with with: -/* */ /// [‘llvm.instrprof.increment’ Intrinsic](https://llvm.org/docs/LangRef.html#llvm-instrprof-increment-intrinsic) -/* */ /// -/* */ /// declare void @llvm.instrprof.increment(i8* , i64 , i32 , i32 ) -/* */ /// -/* */ /// The first argument is a pointer to a global variable containing the name of the entity -/* */ /// being instrumented. This should generally be the (mangled) function name for a set of -/* */ /// counters. -/* */ /// -/* */ /// The second argument is a hash value that can be used by the consumer of the profile data -/* */ /// to detect changes to the instrumented source, and the third is the number of counters -/* */ /// associated with name. It is an error if hash or num-counters differ between two -/* */ /// instances of instrprof.increment that refer to the same name. -/* */ /// -/* */ /// The last argument refers to which of the counters for name should be incremented. It -/* */ /// should be a value between 0 and num-counters. -/* */ /// -/* */ /// # Arguments -/* */ /// -/* */ /// `mangled_fn_name` - &'static ref to computed and injected static str, using: -/* */ /// -/* */ /// ``` -/* */ /// fn rustc_symbol_mangling::compute_symbol_name( -/* */ /// tcx: TyCtxt<'tcx>, -/* */ /// instance: Instance<'tcx>, -/* */ /// compute_instantiating_crate: impl FnOnce() -> CrateNum, -/* */ /// ) -> String -/* */ /// ``` -/* */ /// -/* */ /// `source_version_hash` - Compute hash based that only changes if there are "significant" -/* */ /// to control-flow inside the function. -/* */ /// -/* */ /// `num_counters` - The total number of counter calls [MAX(counter_index) + 1] within the -/* */ /// function. -/* */ /// -/* */ /// `counter_index` - zero-based counter index scoped by the function. (Ordering of -/* */ /// counters, relative to the source code location, is apparently not expected.) -/* */ /// -/* */ /// # Notes -/* */ /// -/* */ /// * The mangled_fn_name may not be computable until generics are monomorphized (see -/* */ /// parameters required by rustc_symbol_mangling::compute_symbol_name). -/* */ /// * The version hash may be computable from AST analysis, and may not benefit from further -/* */ /// lowering. -/* */ /// * num_counters depends on having already identified all counter insertion locations. 
-/* */ /// * counter_index can be computed at time of counter insertion (incrementally). -/* */ /// * Numeric parameters are signed to match the llvm increment intrinsic parameter types. -/* */ fn __lower_incr_cov(_mangled_fn_name: &'static str, _fn_version_hash: i64, _num_counters: i32, _counter_index: i32) { -/* */ } -/* */ -/* */ /// A coverage counter implementation that will work as both an intermediate coverage -/* */ /// counting and reporting implementation at the AST-level only--for debugging and -/* */ /// development--but also serves as a "marker" to be replaced by calls to LLVM -/* */ /// intrinsic coverage counter APIs during the lowering process. -/* */ /// -/* */ /// Calls to this function will be injected automatically into the AST. When LLVM intrinsics -/* */ /// are enabled, the counter function calls that were injected into the AST serve as -/* */ /// placeholders, to be replaced by an alternative, such as: -/* */ /// -/* */ /// * direct invocation of the `llvm.instrprof.increment()` intrinsic; or -/* */ /// * the `__lower_incr_cov()` function, defined above, that would invoke the -/* */ /// `llvm.instrprof.increment()` intrinsic; or -/* */ /// * a similar expression wrapper, with the additional parameters (as defined above -/* */ /// for `__lower_incr_cov()`, that invokes `llvm.instrprof.increment()` and returns the -/* */ /// result of the wrapped expression) -/* */ /// -/* */ /// The first two options would require replacing the inlined wrapper call with something -/* */ /// like: -/* */ /// -/* */ /// ``` -/* */ /// { let result = {expr}; __inlined_incr_cov(context, counter); result } -/* */ /// ``` -/* */ /// -/* */ /// But if the lowering process is already unwrapping the inlined call to `__incr_cov()`, then -/* */ /// it may be a perfect opportunity to replace the function with one of these more -/* */ /// direct methods. -/* */ /// -/* */ #[inline(always)] -/* */ pub fn __incr_cov(region_loc: &str) { -/* */ // Either call the intermediate non-llvm coverage counter API or -/* */ // replace the call to this function with the expanded `__lower_incr_cov()` call. -/* */ -/* */ // let _lock = increment_counter(counter); -/* */ println!("{}", region_loc); -/* */ } -/* */ -/* */ /// Write a report identifying each incremented counter and the number of times each counter -/* */ /// was incremented. -/* */ fn __report() { -/* */ println!("WRITE REPORT!"); -/* */ } -/* */ -/* */ macro_rules! from { -/* */ ($from:expr) => { &format!("from: {}\n to: {}:{}:{}", $from, file!(), line!(), column!()) }; -/* */ } -/* */ -/* */ #[derive(Debug)] -/* */ enum TestEnum { -/* */ Red, -/* */ Green, -/* */ Blue, -/* */ } -/* */ -/* */ struct TestStruct { -/* */ field: i32, -/* */ } -/* */ -/* */ // IMPORTANT! IS WRAPPING main() ENOUGH? OR DO I ALSO NEED TO WRAP THREAD FUNCTIONS, ASSUMING -/* */ // THEY ARE STILL RUNNING WITH MAIN EXITS? (IF THEY CAN). NOT SURE HOW RUST HANDLES THAT. -/* */ -/* */ // I SUSPECT USING THREAD_LOCAL COUNTERS MAY NOT ACTUALLY BE AN OPTIMIZATION OVER MUTEX LOCKS, -/* */ // BUT MAYBE I SHOULD ASK. 
-/* */ -/* */ impl TestStruct { -/* - */ fn new() -> Self { -/* ┃ */ let __result = Self::new_with_value(31415); // function-scoped counter index = 0 -/* ┃ */ __incr_cov(from!("fn new()")); -/* ┃ */ __result -/* - */ } -/* */ -/* - */ fn new_with_value(field: i32) -> Self { -/* ┃ */ let __result = Self { -/* ┃ */ field, -/* ┃ */ }; -/* ┃ */ __incr_cov(from!("fn new_with_value()")); // function-scoped counter index = 0 -/* ┃ */ __result -/* - */ } -/* */ -/* */ fn call_closure(&self, closure: F) -> bool -/* */ where -/* */ F: FnOnce( -/* */ i32, -/* */ ) -> bool, -/* - */ { -/* ┃ */ let __result = closure(123); -/* ┃ */ __incr_cov(from!("fn call_closure()")); // function-scoped counter index = 0 -/* ┃ */ __result -/* - */ } -/* */ -/* - */ fn various(&self) -> Result<(),Error> { -/* ┃ */ use TestEnum::*; -/* ┃ */ let mut color = Red; -/* ┃ */ let _ = color; -/* ┃ */ color = Blue; -/* ┃ */ let _ = color; -/* ┃ */ color = Green; -/* ┃ */ match { let __result = color; __incr_cov(from!("fn various")); __result } { // function-scoped counter index = 0 -/* : */ -/* : */ // !!! RECORD SPAN FROM START OF INNERMOST CONTAINING BLOCK (THE FUNCTION IN THIS CASE) TO END OF MATCH EXPRESSION -/* : */ // If `match`, `while`, `loop`, `for`, `if`, etc. expression has a `return`, `break`, or `continue` -/* : */ // (if legal), then RECORD SPAN FROM START OF INNERMOST CONTAINING BLOCK TO END OF `return` EXPRESSION -/* : */ // If the expression includes lazy booleans, nest calls to `__incr_cov()`. -/* : I */ Red => {println!("roses"); __incr_cov(from!("Red => or end of MatchArmGuard expression inside pattern, if any"));} -/* : - */ Green => { -/* : ┃ */ let spidey = 100; -/* : ┃ */ let goblin = 50; -/* : ┃ */ // if spidey > goblin {__incr_cov(from!(""),{ -/* : ┃ */ // println!("what ev"); -/* : ┃ */ // })} -/* : ┃ */ // ACTUALLY, WRAPPING THE ENTIRE IF BLOCK IN `__incr_cov` IS NOT A GREAT GENERAL RULE. -/* : ┃ */ // JUST INSERTING A `return`, `break`, or `continue` IN THAT BLOCK (without an intermediate condition) -/* : ┃ */ // MAKES THE `__incr_cov()` CALL UNREACHABLE! -/* : ┃ */ // MY ORIGINAL SOLUTION WORKS BETTER (WRAP LAST EXPRESSION OR AFTER LAST SEMICOLON STATEMENT IN BLOCK) -/* : ┃ */ // UNLESS THE EXPRESSION IS NOT A BLOCK. -/* : ┃ - */ if { let __result = spidey > goblin; __incr_cov(from!("Green => or end of MatchArmGuard expression inside pattern, if any")); __result } { -/* : : ┃ */ println!("spidey beats goblin"); -/* : : ┃ */ __incr_cov(from!("block start")); -/* : ┃ - */ } else if { let __result = spidey == goblin; __incr_cov(from!("`else if` on this line")); __result } { -/* : : ┃ */ // COVERAGE NOTE: Do we mark only the expression span (that may be trivial, as in this case), -/* : : ┃ */ // or associate it with the outer block, similar to how the `if` expression is associated with -/* : : ┃ */ // the outer block? (Although it is a continuation, in a sense, it is discontiguous in this case, -/* : : ┃ */ // so I think simpler to just make it its own coverage region.) -/* : : ┃ */ println!("it's a draw"); -/* : : ┃ */ __incr_cov(from!("block start")); -/* : ┃ - - - */ } else if if { let __result = true; __incr_cov(from!("`else if` on this line")); __result } { -/* : : : ┃ */ // return __incr_cov(from!("after `if true`"),Ok(())); -/* : : : ┃ */ // ACTUALLY, BECAUSE OF `return`, WE DO NOT RECORD THE `if true` EVEN THOUGH WE COVERED IT. 
-/* : : : ┃ */ // IN FACT, IF THIS NESTED CONDITIONAL IN A CONDITIONAL EXPRESSION WAS AN `if` (WITHOUT PRECEDING ELSE) -/* : : : ┃ */ // WE WOULD NOT HAVE RECORDED THE COVERAGE OF STATEMENTS LEADING UP TO THE `if`, SO -/* : : : ┃ */ // IT SHOULD BE: -/* ┏-:---:-------:---< */ return { let __result = Ok(()); __incr_cov(from!("")); __result }; -/* V : : : : */ // NOTE THE `from` STRING IS SAME FOR THE `else if`s `__incr_cov` AND THIS `return`. -/* : : : : */ // ONLY ONE OF THESE WILL EXECUTE, TO RECORD COVERAGE FROM THAT SPOT. -/* : : ┃ - */ } else { -/* : : : I */ { let __result = false; __incr_cov(from!("`else`")); __result } -/* : : - - */ } { -/* : : ┃ */ println!("wierd science"); -/* : : ┃ */ __incr_cov(from!("block start")); -/* : ┃ - */ } else { -/* : : ┃ */ println!("goblin wins"); -/* ┏-:---:---< */ return { let __result = Ok(()); __incr_cov(from!("`else`")); __result }; // THIS COUNTS LAST STATEMENT IN `else` BLOCK -/* V : : : */ // COVERAGE NOTE: When counting the span for `return`, -/* : : : */ // `break`, or `continue`, also report the outer spans -/* : : : */ // got this far--including this `else` block. Record -/* : : : */ // The start positions for those outer blocks, but: -/* : : : */ // * For the block containing the `return`, `break`, or -/* : : : */ // `continue`, end report the end position is the -/* : : : */ // start of the `return` span (or 1 char before it). -/* : : : */ // * Anything else? -/* : ┃ - */ } -/* : ┃ - */ // __incr_cov(from!("")); // DO NOT COUNT HERE IF NO STATEMENTS AFTER LAST `if` or `match` -/* : - */ }, -/* : I */ Blue => { println!("violets"); __incr_cov(from!("Blue => or end of MatchArmGuard expression inside pattern, if any")); } -/* ┃ */ } -/* ┃ */ -/* ┃ */ let condition1 = true; -/* ┃ */ let condition2 = false; -/* ┃ */ let condition3 = true; -/* ┃ */ -/* ┃ */ println!("Called `various()` for TestStruct with field={}", self.field); -/* ┃ */ -/* ┃ - */ if { let __result = condition1; __incr_cov(from!("after block end of prior `match` (or `if-else if-else`)")); __result } { -/* : ┃ */ println!("before for loop"); -/* : ┃ - */ for index in { let __result = 0..10; __incr_cov(from!("block start")); __result } { -/* : : ┃ */ println!("top of `for` loop"); -/* : : ┃ - */ if { let __result = index == 8; __incr_cov(from!("block start")); __result } { -/* : : : ┃ */ println!("before break"); -/* : : : ┃ */ // note the following is not legal here: -/* : : : ┃ */ // "can only break with a value inside `loop` or breakable block" -/* : : : ┃ */ // break __incr_cov(from!("")); -/* : : : ┃ */ __incr_cov(from!("block start")); -/* : : ┏-----< */ break; -/* : : V : : */ -/* : : : : */ // FIXME(richkadel): add examples with loop labels, breaking out of inner and outer loop to outer loop label, with expression. -/* : : : : */ // May want to record both the span and the start position after the broken out block depdnding on label -/* : : ┃ - */ } -/* : : ┃ */ println!("after `break` test"); -/* : : ┃ - */ if { let __result = condition2; __incr_cov(from!("block end of `if index == 8`")); __result } { -/* ┏-:---:---:---< */ return { let __result = Ok(()); __incr_cov(from!("block start")); __result }; -/* V : : ┃ - */ } -/* : : ┃ */ -/* : : ┃ */ // BECAUSE THE PREVIOUS COVERAGE REGION HAS A `return`, THEN -/* : : ┃ */ // IF PREVIOUS COVERAGE REGION IS NOT COUNTED THEN OUTER REGION REACHED HERE. -/* : : ┃ */ // ADD A COVERAGE REGION FOR THE SPAN FROM JUST AFTER PREVIOUS REGION TO END -/* : : ┃ */ // OF OUTER SPAN, THEN TRUNCATE TO NEXT REGION NOT REACHED. 
-/* : : ┃ - */ if index % 3 == 2 { // NO __incr_cov() HERE BECAUSE NO STATEMENTS BETWEEN LAST CONDITIONAL BLOCK AND START OF THIS ONE -/* : : Λ : ┃ */ __incr_cov(from!("block end of `if condition2`")); -/* : : ┗-----< */ continue; -/* : : ┃ - */ } -/* : : ┃ */ println!("after `continue` test"); -/* : : ┃ */ // maybe add a runtime flag for a possible `return` here? -/* : : ┃ */ __incr_cov(from!("")); -/* : ┃ - */ } -/* : ┃ */ println!("after for loop"); -/* : ┃ */ let result = if { // START OF NEW CONDITIONAL EXPRESSION. NEXT "GUARANTEED" COUNTER SHOULD COUNT FROM END OF LAST CONDITIONAL EXPRESSION -/* : ┃ */ // A "GUARANTEED" COUNTER CALL IS ONE THAT WILL BE CALLED REGARDLESS OF OTHER CONDITIONS. THIS INCLUDES: -/* : ┃ */ // * A CONDITIONAL EXPRESSION THAT IS NOT A BLOCK (OR ANOTHER CONDITIONAL STATEMENT, WHICH WOULD CONTAIN A BLOCK) -/* : ┃ */ // * OR IF THE NEXT CONDITIONAL EXPRESSION IS A BLOCK OR CONDITIONAL STATEMENT, THEN THE FIRST "GUARANTEED" COUNTER IN THAT BLOCK -/* : ┃ */ // * END OF BLOCK IF THE BLOCK DOES NOT HAVE INNER CONDITIONAL EXPRESSIONS -/* : ┃ */ // * BRANCHING STATEMENTS (`return`, `break`, `continue`) BY EITHER WRAPPING THE BRANCH STATEMENT NON-BLOCK EXPRESSION, -/* : ┃ */ // OR PREPENDING A COUNTER WITH EMPTY TUPLE IF NO EXPRESSION, OR IF EXPRESSION IS A BLOCK, THEN THE NEXT "GUARANTEED" -/* : ┃ */ // COUNTER CALL WITHIN THAT BLOCK. -/* : ┃ */ // BASICALLY, CARRY THE START OF COVERAGE SPAN FORWARD UNTIL THE GUARANTEED COUNTER IS FOUND -/* : ┃ */ println!("after result = if ..."); -/* : ┃ - */ if { let __result = condition2; __incr_cov(from!("block end of `for` loop")); __result } { -/* : : ┃ */ println!("before first return"); -/* ┏-:---:-------< */ return { let __result = Ok(()); __incr_cov(from!("block start")); __result }; -/* V : : - */ } else if { let __result = condition3; __incr_cov(from!("`else`")); __result } { -/* : : ┃ */ // THE ABOVE COUNTER IS _NOT_ REALLY NECESSARY IF EXPRESSION IS GUARANTEED TO EXECUTE. -/* : : ┃ */ // IF WE GET COUNTER IN `else if` BLOCK WE COVERED EXPRESSION. -/* : : ┃ */ // IF WE GET TO ANY REMAINING `else` or `else if` BLOCK WE KNOW WE EVALUATED THIS CONDITION -/* : : ┃ */ // AND ALL OTHERS UP TO THE EXECUTED BLOCK. BUT THE SPAN WOULD HAVE "HOLES" FOR UNEXECUTED BLOCKS. 
-/* : : ┃ */ println!("not second return"); -/* ┏-:---:-------< */ return { let __result = Ok(()); __incr_cov(from!("block start")); __result }; -/* V : : - */ } else { -/* : : ┃ */ println!("not returning"); -/* : : ┃ */ { let __result = false; __incr_cov(from!("block start")); __result } -/* : : - */ } -/* : ┃ */ // NO COUNTER HERE BECAUSE NO STATEMENTS AFTER CONDITIONAL BLOCK -/* : ┃ - */ } { -/* : : ┃ */ println!("branched condition returned true"); -/* : : ┃ */ { let __result = Ok(()); __incr_cov(from!("")); __result } -/* : ┃ - */ } else if self.call_closure( -/* : : - */ |closure_param| { -/* : : ┃ - */ let __result = if condition3 { -/* : : : ┃ */ println!("in closure, captured condition said to print the param {}", closure_param); -/* : : : ┃ */ { let __result = false; __incr_cov(from!("")); __result } -/* : : ┃ - */ } else { -/* : : : ┃ */ println!("in closure, captured condition was false"); -/* : : : ┃ */ { let __result = true; __incr_cov(from!("")); __result } -/* : : ┃ - */ }; -/* : : - */ __incr_cov(from!("")); __result } -/* : : - */ ) { -/* : : ┃ */ println!("closure returned true"); -/* : : ┃ */ { let __result = Err(Error::new(ErrorKind::Other, "Result is error if closure returned true")); __incr_cov(from!("")); __result } -/* : ┃ - */ } else { -/* : : ┃ */ println!("closure returned false"); -/* : : ┃ */ { let __result = Err(Error::new(ErrorKind::Other, "Result is error if closure returned false")); __incr_cov(from!("")); __result } -/* : ┃ - */ }; -/* : ┃ */ println!("bottom of function might be skipped if early `return`"); -/* : ┃ */ { let __result = result; __incr_cov(from!("if condition1")); __result } -/* ┃ - */ } else { -/* : ┃ */ println!("skipping everything in `various()`"); -/* : ┃ */ { let __result = Ok(()); __incr_cov(from!("")); __result } -/* ┃ - */ } -/* ┃ - */ // __incr_cov(from!(""),0) // DO NOT COUNT IF NO STATEMENTS AFTER CONDITIONAL BLOCK. 
ALL COVERAGE IS ALREADY COUNTED -/* - */ } -/* */ } -/* */ -/* - */ fn main() -> Result<(), std::io::Error> { -/* ┃ */ //let mut status: u8 = 2; -/* ┃ */ let mut status: u8 = 1; -/* : - */ let result = if status < 2 && -/* : ┃ */ { let __result = { -/* : ┃ */ status -= 1; -/* : ┃ */ status == 0 -/* : - - */ }; __incr_cov(from!("")); __result } { -/* : ┃ */ let test_struct = TestStruct::new_with_value(100); -/* : ┃ */ let _ = test_struct.various(); -/* ┏-:---< */ return { let __result = Err(Error::new(ErrorKind::Other, format!("Error status {}", status))); __incr_cov(from!("")); __report(); __result } -/* V : - */ } else { -/* : ┃ */ let test_struct = TestStruct::new(); -/* : ┃ */ { let __result = test_struct.various(); __incr_cov(from!("")); __result } -/* : - */ }; -/* ┃ */ println!("done"); -/* ┃ */ { let __result = result; __incr_cov(from!("")); __report(); __result } -/* - */ } \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/coverage_injection_test_alt.rs b/src/test/codegen/coverage-experiments/src/coverage_injection_test_alt.rs deleted file mode 100644 index 20c4835dd882e..0000000000000 --- a/src/test/codegen/coverage-experiments/src/coverage_injection_test_alt.rs +++ /dev/null @@ -1,362 +0,0 @@ -/* */ use std::io::Error; -/* */ use std::io::ErrorKind; -/* */ -/* */ /// Align Rust counter increment with with: -/* */ /// [‘llvm.instrprof.increment’ Intrinsic](https://llvm.org/docs/LangRef.html#llvm-instrprof-increment-intrinsic) -/* */ /// -/* */ /// declare void @llvm.instrprof.increment(i8* , i64 , i32 , i32 ) -/* */ /// -/* */ /// The first argument is a pointer to a global variable containing the name of the entity -/* */ /// being instrumented. This should generally be the (mangled) function name for a set of -/* */ /// counters. -/* */ /// -/* */ /// The second argument is a hash value that can be used by the consumer of the profile data -/* */ /// to detect changes to the instrumented source, and the third is the number of counters -/* */ /// associated with name. It is an error if hash or num-counters differ between two -/* */ /// instances of instrprof.increment that refer to the same name. -/* */ /// -/* */ /// The last argument refers to which of the counters for name should be incremented. It -/* */ /// should be a value between 0 and num-counters. -/* */ /// -/* */ /// # Arguments -/* */ /// -/* */ /// `mangled_fn_name` - &'static ref to computed and injected static str, using: -/* */ /// -/* */ /// ``` -/* */ /// fn rustc_symbol_mangling::compute_symbol_name( -/* */ /// tcx: TyCtxt<'tcx>, -/* */ /// instance: Instance<'tcx>, -/* */ /// compute_instantiating_crate: impl FnOnce() -> CrateNum, -/* */ /// ) -> String -/* */ /// ``` -/* */ /// -/* */ /// `source_version_hash` - Compute hash based that only changes if there are "significant" -/* */ /// to control-flow inside the function. -/* */ /// -/* */ /// `num_counters` - The total number of counter calls [MAX(counter_index) + 1] within the -/* */ /// function. -/* */ /// -/* */ /// `counter_index` - zero-based counter index scoped by the function. (Ordering of -/* */ /// counters, relative to the source code location, is apparently not expected.) -/* */ /// -/* */ /// # Notes -/* */ /// -/* */ /// * The mangled_fn_name may not be computable until generics are monomorphized (see -/* */ /// parameters required by rustc_symbol_mangling::compute_symbol_name). -/* */ /// * The version hash may be computable from AST analysis, and may not benefit from further -/* */ /// lowering. 
-/* */ /// * num_counters depends on having already identified all counter insertion locations. -/* */ /// * counter_index can be computed at time of counter insertion (incrementally). -/* */ /// * Numeric parameters are signed to match the llvm increment intrinsic parameter types. -/* */ fn __lower_incr_cov(_mangled_fn_name: &'static str, _fn_version_hash: i64, _num_counters: i32, _counter_index: i32) { -/* */ } -/* */ -/* */ /// A coverage counter implementation that will work as both an intermediate coverage -/* */ /// counting and reporting implementation at the AST-level only--for debugging and -/* */ /// development--but also serves as a "marker" to be replaced by calls to LLVM -/* */ /// intrinsic coverage counter APIs during the lowering process. -/* */ /// -/* */ /// Calls to this function will be injected automatically into the AST. When LLVM intrinsics -/* */ /// are enabled, the counter function calls that were injected into the AST serve as -/* */ /// placeholders, to be replaced by an alternative, such as: -/* */ /// -/* */ /// * direct invocation of the `llvm.instrprof.increment()` intrinsic; or -/* */ /// * the `__lower_incr_cov()` function, defined above, that would invoke the -/* */ /// `llvm.instrprof.increment()` intrinsic; or -/* */ /// * a similar expression wrapper, with the additional parameters (as defined above -/* */ /// for `__lower_incr_cov()`, that invokes `llvm.instrprof.increment()` and returns the -/* */ /// result of the wrapped expression) -/* */ /// -/* */ /// The first two options would require replacing the inlined wrapper call with something -/* */ /// like: -/* */ /// -/* */ /// ``` -/* */ /// { let result = {expr}; __inlined_incr_cov(context, counter); result } -/* */ /// ``` -/* */ /// -/* */ /// But if the lowering process is already unwrapping the inlined call to `__incr_cov()`, then -/* */ /// it may be a perfect opportunity to replace the function with one of these more -/* */ /// direct methods. -/* */ /// -/* */ #[inline(always)] -/* */ pub fn __incr_cov(region_loc: &str, /*index: u32,*/) { -/* */ // Either call the intermediate non-llvm coverage counter API or -/* */ // replace the call to this function with the expanded `__lower_incr_cov()` call. -/* */ -/* */ // let _lock = increment_counter(counter); -/* */ println!("{}", region_loc); -/* */ } -/* */ -/* */ /// Write a report identifying each incremented counter and the number of times each counter -/* */ /// was incremented. -/* */ fn __report() { -/* */ println!("WRITE REPORT!"); -/* */ } -/* */ -/* */ /// Increment the counter after evaluating the wrapped expression (see `__incr_cov()`), then -/* */ /// write a report identifying each incremented counter and the number of times each counter -/* */ /// was incremented. -/* */ #[inline(always)] -/* */ pub fn __incr_cov_and_report(region_loc: &str, /*counter: u32,*/ result: T) -> T { -/* */ __incr_cov(region_loc, /*counter,*/); -/* */ __report(); -/* */ result -/* */ } -/* */ -/* */ macro_rules! from { -/* */ ($from:expr) => { &format!("from: {}\n to: {}:{}:{}", $from, file!(), line!(), column!()) }; -/* */ } -/* */ -/* */ macro_rules! to { -/* */ ($to:expr) => { &format!("to: {}\n to: {}:{}:{}", $to, file!(), line!(), column!()) }; -/* */ } -/* */ -/* */ #[derive(Debug)] -/* */ enum TestEnum { -/* */ Red, -/* */ Green, -/* */ Blue, -/* */ } -/* */ -/* */ struct TestStruct { -/* */ field: i32, -/* */ } -/* */ -/* */ // IMPORTANT! IS WRAPPING main() ENOUGH? 
OR DO I ALSO NEED TO WRAP THREAD FUNCTIONS, ASSUMING -/* */ // THEY ARE STILL RUNNING WITH MAIN EXITS? (IF THEY CAN). NOT SURE HOW RUST HANDLES THAT. -/* */ -/* */ // I SUSPECT USING THREAD_LOCAL COUNTERS MAY NOT ACTUALLY BE AN OPTIMIZATION OVER MUTEX LOCKS, -/* */ // BUT MAYBE I SHOULD ASK. -/* */ -/* */ impl TestStruct { -/* - */ fn new() -> Self { -/* ┃ */ __incr_cov(to!("end of fn new()")); // function-scoped counter index = 0 -/* ┃ */ Self::new_with_value(31415) -/* - */ } -/* */ -/* - */ fn new_with_value(field: i32) -> Self { -/* ┃ */ __incr_cov(to!("end of fn new_with_value()")); // function-scoped counter index = 0 -/* ┃ */ Self { -/* ┃ */ field, -/* ┃ */ } -/* - */ } -/* */ -/* */ fn call_closure(&self, closure: F) -> bool -/* */ where -/* */ F: FnOnce( -/* */ i32, -/* */ ) -> bool, -/* - */ { -/* ┃ */ __incr_cov(to!("end of fn call_closure()")); // function-scoped counter index = 0 -/* ┃ */ closure(123) -/* - */ } -/* */ -/* - */ fn various(&self) -> Result<(),Error> { -/* ┃ */ __incr_cov(to!("just before next branch: after `match color`: pattern selection")); -/* ┃ */ use TestEnum::*; -/* ┃ */ let mut color = Red; -/* ┃ */ let _ = color; -/* ┃ */ color = Blue; -/* ┃ */ let _ = color; -/* ┃ */ color = Green; -/* ┃ */ match color { // function-scoped counter index = 0 -/* : */ -/* : */ // !!! RECORD SPAN FROM START OF INNERMOST CONTAINING BLOCK (THE FUNCTION IN THIS CASE) TO END OF MATCH EXPRESSION -/* : */ // If `match`, `while`, `loop`, `for`, `if`, etc. expression has a `return`, `break`, or `continue` -/* : */ // (if legal), then RECORD SPAN FROM START OF INNERMOST CONTAINING BLOCK TO END OF `return` EXPRESSION -/* : */ // If the expression includes lazy booleans, nest calls to `__incr_cov()`. -/* : - */ Red => { -/* : ┃ */ __incr_cov(to!("end of matched Red")); -/* : ┃ */ println!("roses"); -/* : - */ } -/* : - */ Green => { -/* : ┃ */ __incr_cov(to!("just before next branch: after `if spidey > goblin`")); -/* : ┃ */ let spidey = 100; -/* : ┃ */ let goblin = 50; -/* : ┃ */ // if spidey > goblin {__incr_cov(from!(""),{ -/* : ┃ */ // println!("what ev"); -/* : ┃ */ // })} -/* : ┃ */ // ACTUALLY, WRAPPING THE ENTIRE IF BLOCK IN `__incr_cov` IS NOT A GREAT GENERAL RULE. -/* : ┃ */ // JUST INSERTING A `return`, `break`, or `continue` IN THAT BLOCK (without an intermediate condition) -/* : ┃ */ // MAKES THE `__incr_cov()` CALL UNREACHABLE! -/* : ┃ */ // MY ORIGINAL SOLUTION WORKS BETTER (WRAP LAST EXPRESSION OR AFTER LAST SEMICOLON STATEMENT IN BLOCK) -/* : ┃ */ // UNLESS THE EXPRESSION IS NOT A BLOCK. -/* : ┃ - */ if spidey > goblin { -/* : : ┃ */ __incr_cov(to!("end of if block, if no earlier branch in this scope")); -/* : : ┃ */ println!("spidey beats goblin"); -/* : : ┃ */ -/* : ┃ - */ } else if { -/* : : : */ // Make sure we can't compute the coverage count here. -/* : : : */ // We know the expression executed if the previous if block DID NOT -/* : : : */ // execute, and either this `else if` block does execute OR any subsequent -/* : : : */ // `else if` or `else` blocks execute, OR none of the blocks in the -/* : : : */ // `if`, `else if` or `else` blocks execute. -/* : : : */ // `if`, `else if` or `else` blocks execute. 
-/* : : ┃ */ __incr_cov(to!("end of `else if spidey == goblin` expression")); -/* : : ┃ */ spidey == goblin -/* : ┃ - */ } { -/* : : ┃ */ __incr_cov(to!("end of if block, if no earlier branch in this scope")); -/* : : ┃ */ // COVERAGE NOTE: Do we mark only the expression span (that may be trivial, as in this case), -/* : : ┃ */ // or associate it with the outer block, similar to how the `if` expression is associated with -/* : : ┃ */ // the outer block? (Although it is a continuation, in a sense, it is discontiguous in this case, -/* : : ┃ */ // so I think simpler to just make it its own coverage region.) -/* : : ┃ */ println!("it's a draw"); -/* : : ┃ */ -/* : ┃ - - - */ } else if { -/* : : ┃ */ __incr_cov(to!("end of `if true`")); -/* : ┃ - - - */ if true { -/* : : : ┃ */ __incr_cov(to!("end of `return Ok(())`")); -/* ┏-:---:-------:---< */ return Ok(()); -/* V : : ┃ - */ } else { -/* : : : ┃ */ // __incr_cov(to!("end of else block")); -/* : : : ┃ */ // computed counter expression -/* : : : ┃ */ false -/* : : : - */ } -/* : : - - - */ } { -/* : : ┃ */ __incr_cov(to!("end of if block")); -/* : : ┃ */ println!("wierd science"); -/* : ┃ - */ } else { -/* : : ┃ */ // __incr_cov(to!("end of `return Ok(())")); -/* : : ┃ */ // counter expression: (start of Green match arm) - (if spidey > goblin) - (previous `} else if {`) -/* : : ┃ */ println!("goblin wins"); -/* ┏-:---:---< */ return Ok(()); // THIS COUNTS LAST STATEMENT IN `else` BLOCK -/* V : : : */ // COVERAGE NOTE: When counting the span for `return`, -/* : : : */ // `break`, or `continue`, also report the outer spans -/* : : : */ // got this far--including this `else` block. Record -/* : : : */ // The start positions for those outer blocks, but: -/* : : : */ // * For the block containing the `return`, `break`, or -/* : : : */ // `continue`, end report the end position is the -/* : : : */ // start of the `return` span (or 1 char before it). -/* : : : */ // * Anything else? 
-/* : ┃ - */ } -/* : : */ // __incr_cov(to!("end of matched Green")); -/* : : */ // // DO NOT COUNT HERE IF NO STATEMENTS AFTER LAST `if` or `match` -/* : - */ }, -/* : - */ Blue => { -/* : ┃ */ __incr_cov(to!("end of matched Blue")); -/* : ┃ */ println!("violets"); -/* : - */ } -/* ┃ */ } -/* ┃ */ __incr_cov(to!("just before next branch: after `if condition1` (HIR: 'match condition1')")); -/* ┃ */ -/* ┃ */ let condition1 = true; -/* ┃ */ let condition2 = false; -/* ┃ */ let condition3 = true; -/* ┃ */ -/* ┃ */ println!("Called `various()` for TestStruct with field={}", self.field); -/* ┃ */ -/* ┃ - */ if condition1 { -/* : ┃ */ println!("before while loop"); -/* : ┃ */ let mut countdown = 10; -/* : ┃ */ // Must increment before repeated while text expression -/* : : I */ while countdown > 0 { // span is just the while test expression -/* : : ┃ */ println!("top of `while` loop"); -/* : : ┃ */ countdown -= 1; -/* : : ┃ */ // // Counter not needed, but span is computed as "while test" minus "block start" -/* : : ┃ */ // If test expression is 11, and the outer block runs only once, 11-1 = 10 -/* : ┃ - */ } -/* : ┃ */ println!("before for loop"); -/* : ┃ - */ for index in 0..10 { -/* : : ┃ */ println!("top of `for` loop"); -/* : : ┃ - */ if index == 8 { -/* : : : ┃ */ println!("before break"); -/* : : : ┃ */ // note the following is not legal here: -/* : : : ┃ */ // "can only break with a value inside `loop` or breakable block" -/* : : : ┃ */ // break -/* : : : ┃ */ -/* : : ┏-----< */ break; -/* : : V : : */ -/* : : : : */ // FIXME(richkadel): add examples with loop labels, breaking out of inner and outer loop to outer loop label, with expression. -/* : : : : */ // May want to record both the span and the start position after the broken out block depdnding on label -/* : : ┃ - */ } -/* : : ┃ */ println!("after `break` test"); -/* : : ┃ - */ if condition2 { -/* ┏-:---:---:---< */ return Ok(()); -/* V : : ┃ - */ } -/* : : ┃ */ -/* : : ┃ */ // BECAUSE THE PREVIOUS COVERAGE REGION HAS A `return`, THEN -/* : : ┃ */ // IF PREVIOUS COVERAGE REGION IS NOT COUNTED THEN OUTER REGION REACHED HERE. -/* : : ┃ */ // ADD A COVERAGE REGION FOR THE SPAN FROM JUST AFTER PREVIOUS REGION TO END -/* : : ┃ */ // OF OUTER SPAN, THEN TRUNCATE TO NEXT REGION NOT REACHED. -/* : : ┃ - */ if index % 3 == 2 { // NO __incr_cov() HERE BECAUSE NO STATEMENTS BETWEEN LAST CONDITIONAL BLOCK AND START OF THIS ONE -/* : : Λ : ┃ */ -/* : : ┗-----< */ continue; -/* : : ┃ - */ } -/* : : ┃ */ println!("after `continue` test"); -/* : : ┃ */ // maybe add a runtime flag for a possible `return` here? -/* : : ┃ */ -/* : ┃ - */ } -/* : ┃ */ println!("after for loop"); -/* : ┃ */ let result = if { // START OF NEW CONDITIONAL EXPRESSION. NEXT "GUARANTEED" COUNTER SHOULD COUNT FROM END OF LAST CONDITIONAL EXPRESSION -/* : ┃ */ // A "GUARANTEED" COUNTER CALL IS ONE THAT WILL BE CALLED REGARDLESS OF OTHER CONDITIONS. 
THIS INCLUDES: -/* : ┃ */ // * A CONDITIONAL EXPRESSION THAT IS NOT A BLOCK (OR ANOTHER CONDITIONAL STATEMENT, WHICH WOULD CONTAIN A BLOCK) -/* : ┃ */ // * OR IF THE NEXT CONDITIONAL EXPRESSION IS A BLOCK OR CONDITIONAL STATEMENT, THEN THE FIRST "GUARANTEED" COUNTER IN THAT BLOCK -/* : ┃ */ // * END OF BLOCK IF THE BLOCK DOES NOT HAVE INNER CONDITIONAL EXPRESSIONS -/* : ┃ */ // * BRANCHING STATEMENTS (`return`, `break`, `continue`) BY EITHER WRAPPING THE BRANCH STATEMENT NON-BLOCK EXPRESSION, -/* : ┃ */ // OR PREPENDING A COUNTER WITH EMPTY TUPLE IF NO EXPRESSION, OR IF EXPRESSION IS A BLOCK, THEN THE NEXT "GUARANTEED" -/* : ┃ */ // COUNTER CALL WITHIN THAT BLOCK. -/* : ┃ */ // BASICALLY, CARRY THE START OF COVERAGE SPAN FORWARD UNTIL THE GUARANTEED COUNTER IS FOUND -/* : ┃ */ println!("after result = if ..."); -/* : ┃ - */ if condition2 { -/* : : ┃ */ println!("before first return"); -/* ┏-:---:-------< */ return Ok(()); -/* V : : - */ } else if condition3 { -/* : : ┃ */ // THE ABOVE COUNTER IS _NOT_ REALLY NECESSARY IF EXPRESSION IS GUARANTEED TO EXECUTE. -/* : : ┃ */ // IF WE GET COUNTER IN `else if` BLOCK WE COVERED EXPRESSION. -/* : : ┃ */ // IF WE GET TO ANY REMAINING `else` or `else if` BLOCK WE KNOW WE EVALUATED THIS CONDITION -/* : : ┃ */ // AND ALL OTHERS UP TO THE EXECUTED BLOCK. BUT THE SPAN WOULD HAVE "HOLES" FOR UNEXECUTED BLOCKS. -/* : : ┃ */ println!("not second return"); -/* ┏-:---:-------< */ return Ok(()); -/* V : : - */ } else { -/* : : ┃ */ println!("not returning"); -/* : : ┃ */ false -/* : : - */ } -/* : ┃ */ // NO COUNTER HERE BECAUSE NO STATEMENTS AFTER CONDITIONAL BLOCK -/* : ┃ - */ } { -/* : : ┃ */ println!("branched condition returned true"); -/* : : ┃ */ Ok(()) -/* : ┃ - */ } else if self.call_closure( -/* : : - */ |closure_param| -/* : : ┃ - */ if condition3 { -/* : : : ┃ */ println!("in closure, captured condition said to print the param {}", closure_param); -/* : : : ┃ */ false -/* : : ┃ - */ } else { -/* : : : ┃ */ println!("in closure, captured condition was false"); -/* : : : ┃ */ true -/* : : ┃ - */ } -/* : : - */ -/* : : - */ ) { -/* : : ┃ */ println!("closure returned true"); -/* : : ┃ */ Err(Error::new(ErrorKind::Other, "Result is error if closure returned true")) -/* : ┃ - */ } else { -/* : : ┃ */ println!("closure returned false"); -/* : : ┃ */ Err(Error::new(ErrorKind::Other, "Result is error if closure returned false")) -/* : ┃ - */ }; -/* : ┃ */ println!("bottom of function might be skipped if early `return`"); -/* : ┃ */ result -/* ┃ - */ } else { -/* : ┃ */ println!("skipping everything in `various()`"); -/* : ┃ */ Ok(()) -/* ┃ - */ } -/* ┃ - */ // 0 // DO NOT COUNT IF NO STATEMENTS AFTER CONDITIONAL BLOCK. 
ALL COVERAGE IS ALREADY COUNTED -/* - */ } -/* */ } -/* */ -/* - */ fn main() -> Result<(), std::io::Error> { -/* ┃ */ //let mut status: u8 = 2; -/* ┃ */ let mut status: u8 = 1; -/* : - */ let result = if status < 2 && -/* : ┃ */ { -/* : ┃ */ status -= 1; -/* : ┃ */ status == 0 -/* : - - */ } { -/* : ┃ */ let test_struct = TestStruct::new_with_value(100); -/* : ┃ */ let _ = test_struct.various(); -/* ┏-:---< */ return __incr_cov_and_report(from!(""),Err(Error::new(ErrorKind::Other, format!("Error status {}", status)))) -/* V : - */ } else { -/* : ┃ */ let test_struct = TestStruct::new(); -/* : ┃ */ test_struct.various() -/* : - */ }; -/* ┃ */ println!("done"); -/* ┃ */ __incr_cov_and_report(from!(""),result) // function-scoped counter index = 0 -/* - */ } \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/drop_trait.rs b/src/test/codegen/coverage-experiments/src/drop_trait.rs deleted file mode 100644 index 75400e037e9f0..0000000000000 --- a/src/test/codegen/coverage-experiments/src/drop_trait.rs +++ /dev/null @@ -1,25 +0,0 @@ -#[inline(always)] -pub fn __incr_cov(_region_loc: &str, result: T) -> T { - result -} - -struct Firework { - _strength: i32, -} - -impl Drop for Firework { - fn drop(&mut self) { - __incr_cov("start of drop()", ()); - } -} - -fn main() -> Result<(),u8> { - let _firecracker = Firework { _strength: 1 }; - - if __incr_cov("start of main()", true) { - return __incr_cov("if true", { let _t = Err(1); _t }); - } - - let _tnt = Firework { _strength: 100 }; - Ok(()) -} \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/drop_trait_with_comments_prints.rs b/src/test/codegen/coverage-experiments/src/drop_trait_with_comments_prints.rs deleted file mode 100644 index de9f5d5cb4647..0000000000000 --- a/src/test/codegen/coverage-experiments/src/drop_trait_with_comments_prints.rs +++ /dev/null @@ -1,53 +0,0 @@ -// -// -// -// It's interesting to speculate if there is a way to leverage the Drop trait functionality -// to increment counters when a scope is closed, but I don't think it would help "out of the box". -// -// A `return` or `break` with expression might not need a temp value expression wrapper -// such as `return { let _t = result_expression; __incr_counter(...); _t };` -// -// ... **if** the __incr_counter() was somehow called from a "drop()" trait function. -// -// The problem is, since the drop call is automatic, there is no way to have argument variants -// depending on where the drop() occurs (e.g., from a `return` statement vs. from the end of -// the function). We need 2 different code regions though. -// -// -// -// - -#[inline(always)] -pub fn __incr_cov(_region_loc: &str, /*index: u32,*/ result: T) -> T { - // println!("from: {}", _region_loc); - result -} - -struct Firework { - strength: i32, -} - -impl Drop for Firework { - fn drop(&mut self) { - println!("BOOM times {}!!!", self.strength); - __incr_cov("start of drop()", ()); - } -} - -fn main() -> Result<(),u8> { - let _firecracker = Firework { strength: 1 }; - - if __incr_cov("start of main()", true) { - return __incr_cov("if true", { let _t = Err(1); println!("computing return value"); _t }); - } - - let _tnt = Firework { strength: 100 }; - // __incr_cov("after if block", Ok(())) // CAN USE COUNTER EXPRESSION: "start of drop()" - "if true" - Ok(()) -} - -// OUTPUT WHEN RUNNING THIS PROGRAM IS AS EXPECTED: - -// computing return value -// BOOM times 1!!! 
-// Error: 1 diff --git a/src/test/codegen/coverage-experiments/src/for.rs b/src/test/codegen/coverage-experiments/src/for.rs deleted file mode 100644 index 3f44c382a1e3f..0000000000000 --- a/src/test/codegen/coverage-experiments/src/for.rs +++ /dev/null @@ -1,41 +0,0 @@ -#[inline(always)] -pub fn __incr_cov(_region_loc: &str, /*index: u32,*/ result: T) -> T { - result -} - -fn main() { - for countdown in __incr_cov("start", 10..0) { - let _ = countdown; - __incr_cov("top of for", ()); - } -} - -// LOWERED TO HIR: -// -// fn main() { -// { -// let _t = -// match ::std::iter::IntoIterator::into_iter(__incr_cov("start", -// ::std::ops::Range{start: -// 10, -// end: -// 0,})) -// { -// mut iter => -// loop { -// let mut __next; -// match ::std::iter::Iterator::next(&mut iter) { -// ::std::option::Option::Some(val) => -// __next = val, -// ::std::option::Option::None => break , -// } -// let countdown = __next; -// { -// let _ = countdown; -// __incr_cov("top of for", ()); -// } -// }, -// }; -// _t -// } -// } \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/for_with_comments.rs b/src/test/codegen/coverage-experiments/src/for_with_comments.rs deleted file mode 100644 index 03d11b2c230ca..0000000000000 --- a/src/test/codegen/coverage-experiments/src/for_with_comments.rs +++ /dev/null @@ -1,24 +0,0 @@ -/* */ #[inline(always)] -/* */ pub fn __incr_cov(_region_loc: &str, /*index: u32,*/ result: T) -> T { -/* */ result -/* */ } -/* */ -/* - */ fn main() { -/* : I */ for countdown in __incr_cov("start", 10..0) { // span is just the while test expression -/* : ┃ */ let _ = countdown; -/* : ┃ */ __incr_cov("top of for", ()); -/* ┃ - */ } -/* - */ } - - -// -Z unpretty=val -- present the input source, unstable (and less-pretty) variants; -// valid types are any of the types for `--pretty`, as well as: -// `expanded`, `expanded,identified`, -// `expanded,hygiene` (with internal representations), -// `everybody_loops` (all function bodies replaced with `loop {}`), -// `hir` (the HIR), `hir,identified`, -// `hir,typed` (HIR with types for each node), -// `hir-tree` (dump the raw HIR), -// `mir` (the MIR), or `mir-cfg` (graphviz formatted MIR) - -// argument to `pretty` must be one of `normal`, `expanded`, `identified`, or `expanded,identified` diff --git a/src/test/codegen/coverage-experiments/src/if.rs b/src/test/codegen/coverage-experiments/src/if.rs deleted file mode 100644 index ad50f6be19004..0000000000000 --- a/src/test/codegen/coverage-experiments/src/if.rs +++ /dev/null @@ -1,80 +0,0 @@ -#![feature(core_intrinsics)] - -pub fn __llvm_incr_counter(_region_loc: &str) { -} - -#[inline(always)] -pub fn __incr_cov(region_loc: &str, result: T) -> T { - __llvm_incr_counter(region_loc); - result -} - -static TEST_FUNC_NAME: &'static [u8; 6] = b"main()"; - -fn main() { - let mut countdown = 10; - if __incr_cov("start", countdown > 0) { - - - // // TEST CALLING INTRINSIC: - unsafe { core::intrinsics::instrprof_increment(TEST_FUNC_NAME as *const u8, 1234 as u64, 314 as u32, 31 as u32) }; - // // Results in: - // // LLVM ERROR: Cannot select: intrinsic %llvm.instrprof.increment - // // I may need to pass one or more of the following flags (or equivalent opts) to LLVM to enable this: - // // -fprofile-instr-generate -fcoverage-mapping - - - countdown -= 1; - __incr_cov("if block",()); - } else if countdown > 5 { - countdown -= 2; - __incr_cov("else if block",()); - } else { - countdown -= 3; - } - - let mut countdown = 10; - if { let _tcov = countdown > 0; 
__llvm_incr_counter("start", ); _tcov } { - countdown -= 1; - __incr_cov("if block",()); - } else if countdown > 5 { - countdown -= 2; - __incr_cov("else if block",()); - } else { - countdown -= 3; - } -} - -// NOTE: hir REDUNDANTLY lowers the manually inlined counter in the second if block to: -// -// match { -// let _t = -// { -// let _tcov = countdown > 0; -// __llvm_incr_counter("start"); -// _tcov -// }; -// _t -// } { - -// I don't know if optimization phases will fix this or not. -// Otherwise, a more optimal (but definitely special case) way to handle this would be -// to inject the counter between the hir-introduced temp `_t` assignment and the block result -// line returning `_t`: -// -// match { -// let _t = countdown > 0; -// __llvm_incr_counter("start"); // <-- the only thing inserted for coverage here -// _t -// } -// -// UNFORTUNATELY THIS IS NOT A PATTERN WE CAN ALWAYS LEVERAGE, FOR EXPRESSIONS THAT HAVE VALUES -// WHERE WE NEED TO INJECT THE COUNTER AFTER THE EXPRESSION BUT BEFORE IT IS USED. -// -// IT DOES APPEAR TO BE THE CASE FOR WHILE EXPRESSIONS, (BECOMES loop { match { let _t = condition; _t} { true => {...} _ => break, }}) -// AND IS TRUE FOR IF EXPRESSIONS AS NOTED -// BUT NOT FOR RETURN STATEMENT (and I'm guessing not for loop { break value; } ? ) -// -// AND NOT FOR LAZY BOOLEAN EXPRESSIONS! -// -// AND NOT FOR MATCH EXPRESSIONS IN THE ORIGINAL SOURCE! \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/if_with_comments.rs b/src/test/codegen/coverage-experiments/src/if_with_comments.rs deleted file mode 100644 index 267e7bca2c5a2..0000000000000 --- a/src/test/codegen/coverage-experiments/src/if_with_comments.rs +++ /dev/null @@ -1,39 +0,0 @@ -/* */ #[inline(always)] -/* */ pub fn __incr_cov(_region_loc: &str, /*index: u32,*/ result: T) -> T { -/* */ result -/* */ } -/* */ -/* - */ fn main() { -/* ┃ */ let mut countdown = 10; -/* : I */ if __incr_cov("start", countdown > 0) { // span is from start of main() -/* : ┃ */ countdown -= 1; -/* : ┃ */ __incr_cov("if block",()); -/* ┃ - */ } - - let mut countdown = 10; - if __incr_cov("start", countdown > 0) { - countdown -= 1; - __incr_cov("if block",()); - } else if countdown > 5 { // counter expression "start" - "if block" - countdown -= 2; - __incr_cov("else if block",()); - } else { - countdown -= 3; - // __incr_cov("else block",()); // counter expression (countdown > 5 counter expression) - "else if block" - // PLACED AT END OF ELSE BLOCK OR START OF FIRST CONDITIONAL BLOCK, IF ANY (PRESUMING POSSIBLE EARLY EXIT). - // IF WE CAN GUARANTEE NO EARLY EXIT IN THIS BLOCK, THEN AT THE END IS FINE EVEN IF ELSE BLOCK CONTAINS OTHER CONDITIONS. 
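        // A hypothetical worked example of this counter-expression arithmetic (the
        // numbers are illustrative only, not from an actual run): if "start" fires 10
        // times and "if block" fires 7 times, the `else if` condition must have been
        // evaluated 10 - 7 = 3 times; if "else if block" then fires 2 times, this
        // `else` block's count can be derived as 3 - 2 = 1 without emitting its own
        // counter -- provided no early exit can skip the counters being subtracted.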
- } - -/* - */ } - -// -Z unpretty=val -- present the input source, unstable (and less-pretty) variants; -// valid types are any of the types for `--pretty`, as well as: -// `expanded`, `expanded,identified`, -// `expanded,hygiene` (with internal representations), -// `everybody_loops` (all function bodies replaced with `loop {}`), -// `hir` (the HIR), `hir,identified`, -// `hir,typed` (HIR with types for each node), -// `hir-tree` (dump the raw HIR), -// `mir` (the MIR), or `mir-cfg` (graphviz formatted MIR) - -// argument to `pretty` must be one of `normal`, `expanded`, `identified`, or `expanded,identified` diff --git a/src/test/codegen/coverage-experiments/src/increment_intrinsic.rs b/src/test/codegen/coverage-experiments/src/increment_intrinsic.rs deleted file mode 100644 index d4708cd367ff6..0000000000000 --- a/src/test/codegen/coverage-experiments/src/increment_intrinsic.rs +++ /dev/null @@ -1,11 +0,0 @@ -#![feature(core_intrinsics)] - -pub fn not_instrprof_increment(_hash: u64, _num_counters: u32, _index: u32) { -} - -fn main() { - // COMPARE THIS WITH INTRINSIC INSERTION - //not_instrprof_increment(1234 as u64, 314 as u32, 31 as u32); - - unsafe { core::intrinsics::instrprof_increment(1234 as u64, 314 as u32, 31 as u32) }; -} \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/just_main.rs b/src/test/codegen/coverage-experiments/src/just_main.rs deleted file mode 100644 index 081e5d72a6e0a..0000000000000 --- a/src/test/codegen/coverage-experiments/src/just_main.rs +++ /dev/null @@ -1,3 +0,0 @@ -fn main() { - println!("hello world! (should be covered)"); -} diff --git a/src/test/codegen/coverage-experiments/src/lazy_boolean.rs b/src/test/codegen/coverage-experiments/src/lazy_boolean.rs deleted file mode 100644 index 263277c7cdc4d..0000000000000 --- a/src/test/codegen/coverage-experiments/src/lazy_boolean.rs +++ /dev/null @@ -1,17 +0,0 @@ -pub fn __llvm_incr_counter(_region_loc: &str) { -} - -#[inline(always)] -pub fn __incr_cov(region_loc: &str, result: T) -> T { - __llvm_incr_counter(region_loc); - result -} - -fn main() { - let a = 1; - let b = 10; - let c = 100; - let _result = __incr_cov("start", a < b) || __incr_cov("or", b < c); - - let _result = { let _t = a < b; __llvm_incr_counter("start"); _t } || { let _t = b < c; __llvm_incr_counter("start"); _t }; -} \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/loop_break_value.rs b/src/test/codegen/coverage-experiments/src/loop_break_value.rs deleted file mode 100644 index 76caa833ec4f8..0000000000000 --- a/src/test/codegen/coverage-experiments/src/loop_break_value.rs +++ /dev/null @@ -1,15 +0,0 @@ -pub fn __llvm_incr_counter(_region_loc: &str) { -} - -#[inline(always)] -pub fn __incr_cov(region_loc: &str, result: T) -> T { - __llvm_incr_counter(region_loc); - result -} - -fn main() { - __incr_cov("start", ()); - let _result = loop { - break __incr_cov("top of loop", true); - }; -} \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/match.rs b/src/test/codegen/coverage-experiments/src/match.rs deleted file mode 100644 index afbb20888eab5..0000000000000 --- a/src/test/codegen/coverage-experiments/src/match.rs +++ /dev/null @@ -1,22 +0,0 @@ -pub fn __llvm_incr_counter(_region_loc: &str) { -} - -#[inline(always)] -pub fn __incr_cov(region_loc: &str, result: T) -> T { - __llvm_incr_counter(region_loc); - result -} - -fn main() { - let a = 1; - let b = 10; - let _result = match a < b { - true => true, - _ => false, - }; - - let _result = 
match __incr_cov("end of first match", a < b) { - true => __incr_cov("matched true", true), - _ => false, // counter expression "end of first match" - "matched true" - }; -} \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/match_with_increment.rs b/src/test/codegen/coverage-experiments/src/match_with_increment.rs deleted file mode 100644 index f618b37ed5247..0000000000000 --- a/src/test/codegen/coverage-experiments/src/match_with_increment.rs +++ /dev/null @@ -1,305 +0,0 @@ -#![feature(core_intrinsics)] -//static TEST_FUNC_NAME: &'static [u8; 7] = b"main()\0"; - static TEST_FUNC_NAME: &'static [u8; 6] = b"main()"; -fn main() { - let a = 1; - let b = 10; - let _result = match { - let _t = a < b; - unsafe { core::intrinsics::instrprof_increment(TEST_FUNC_NAME as *const u8, 1234 as u64, 3 as u32, 0 as u32) }; - _t - } { - true => { - let _t = true; - unsafe { core::intrinsics::instrprof_increment(TEST_FUNC_NAME as *const u8, 1234 as u64, 3 as u32, 1 as u32) }; - _t - } - _ => false, - }; -} - -/* - -I NEED TO INSERT THE instrprof_increment() CALL: - - 1. JUST BEFORE THE switchInt(_4) (because we haven't counted entering the function main() yet, deferring that to "JUST BEFORE FIRST BRANCH") - 2. SOME TIME AFTER THE switchInt(_4), AND JUST BEFORE ANOTHER BRANCH (in this case, before "goto") - 2.a. NOT BEFORE BOTH GOTO'S AFTER switchInt(_4) (because one can be calculated by counter expression), BUT PERHAPS INSERT A noop PLACEHOLDER - AS A MARKER TO INCLUDE THE COVERAGE REGION AND REFERENCE THE COUNTERS TO BE SUBTRACTED (AND/OR SUMMED)? - - WHY DEFER INSERTING COUNTERS TO "JUST BEFORE FIRST BRANCH"? We can ignore panic/unwind() and only count if the coverage region ACTUALLY - executed in entirety. BUT IS THAT NECESSARY? IS IT MUCH EASIER TO INSERT COUNTERS AT THE TOP OF A REGION THAT MUST EXECUTE IN ENTIRETY IF - PANIC DOES NOT OCCUR? AND WHAT IF WE ADD SUPPORT FOR PANIC UNWIND (later)? - - IS THERE A BENEFIT OF THE DEFERRED APPROACH WHEN CONSIDERING EXPRESSIONS MAY HAVE EARLY RETURNS? (BECAUSE, WE STILL NEED TO COUNT THE REGION - LEADING UP TO THE EXPRESSION ANYWAY) - -================================================= -================================================= - -To inject an intrinsic after computing a final expression value of a coverage region: - -Replace the following basic block end (last statement plus terminator): - -... ... -StorageLive(_4) -StorageLive(_5) -_5 = _1 -StorageLive(_6) -_6 = _2 -_4 = Lt(move _5, move _6) -StorageDead(_6) -StorageDead(_5) - <------ to insert instrprof_increment() here -FakeRead(ForMatchedPlace, _4) --------------------------------------------------------------------------------------- -switchInt(_4) - - -================================================= -Insert call to intrinsic with: - -StorageLive(_4) # _4 is now meant for deferred FakeRead(ForMatchdPlace, _4) in BasicBlock after increment() call -StorageLive(_5) # Unchanged except _4 is now _5 -StorageLive(_6) # Unchanged except _5 is now _6 -_6 = _1 # Unchanged except _5 is now _6 -StorageLive(_7) # Unchanged except _6 is now _7 -_7 = _2 # Unchanged except _6 is now _7 -_5 = Lt(move _6, move _7) # Unchanged except _4, _5, _6 is now _5, _6, _7 -StorageDead(_7) # Unchanged except _6 is now _7 -StorageDead(_6) # Unchanged except _5 is now _6 - -FakeRead(ForLet, _5) # CHANGED ForMatchedPlace to ForLet - -> # ALL NEW AND NECESSARY TO CALL instrprof_increment() -> StorageLive(_8) # ?? stores function pointer to instrprof_increment function? 
-> StorageLive(_9) -> StorageLive(_10) -> StorageLive(_11) -> _11 = const {alloc1+0: &&[u8; 6]} -> _10 = &raw const (*(*_11)) -> _9 = move _10 as *const u8 (Pointer(ArrayToPointer)) -> StorageDead(_10) -> StorageLive(_12) -> _12 = const 1234u64 -> StorageLive(_13) -> _13 = const 3u32 -> StorageLive(_14) -> _14 = const 0u32 -> -------------------------------------------------------------------------------------- -> _8 = const std::intrinsics::instrprof_increment(move _9, move _12, move _13, move _14) -> -> -> return -> -> StorageDead(_14) -> StorageDead(_13) -> StorageDead(_12) -> StorageDead(_9) -> StorageDead(_11) -> StorageDead(_8) - -_4 = _5 # ARE THESE LINES REDUNDANT? CAN I JUST PASS _5 DIRECTLY TO FakeRead()? -StorageDead(_5) # DROP "_t" temp result of `let _t = a < b` - # (NOTE THAT IF SO, I CAN REMOVE _5 altogether, and use _4, which coincidentally makes less changes) - # SEE BELOW - -FakeRead(ForMatchedPlace, _4) # Unchanged --------------------------------------------------------------------------------------- -switchInt(_4) # Unchanged - - -================================================= -Can I skip the extra variable and insert call to intrinsic with: - -StorageLive(_4) # Unchanged -StorageLive(_5) # Unchanged -_5 = _1 # Unchanged -StorageLive(_6) # Unchanged -_6 = _2 # Unchanged -_4 = Lt(move _5, move _6) # Unchanged -StorageDead(_6) # Unchanged -StorageDead(_5) # Unchanged - -> # ALL NEW AND NECESSARY TO CALL instrprof_increment() -> FakeRead(ForLet, _4) # Save the post-increment result in temp "_t" -> StorageLive(_8) # ?? stores function pointer to instrprof_increment function? -> StorageLive(_9) -> StorageLive(_10) -> StorageLive(_11) -> _11 = const {alloc1+0: &&[u8; 6]} -> _10 = &raw const (*(*_11)) -> _9 = move _10 as *const u8 (Pointer(ArrayToPointer)) -> StorageDead(_10) -> StorageLive(_12) -> _12 = const 1234u64 -> StorageLive(_13) -> _13 = const 3u32 -> StorageLive(_14) -> _14 = const 0u32 -> -------------------------------------------------------------------------------------- -> _8 = const std::intrinsics::instrprof_increment(move _9, move _12, move _13, move _14) -> -> -> return -> -> StorageDead(_14) -> StorageDead(_13) -> StorageDead(_12) -> StorageDead(_9) -> StorageDead(_11) -> StorageDead(_8) - -FakeRead(ForMatchedPlace, _4) # Unchanged (PREVIOUSLY USED IN FakeRead(ForLet), is that OK?) --------------------------------------------------------------------------------------- -switchInt(_4) # Unchanged - - - - - -================================================= -================================================= - -For the second inserted call to instrprof_increment, without that call we have: - --------------------------------------------------------------------------------------- -switchInt(_4) # From above - --> otherwise # that is, "NOT false" - -_3 = const true - <------ to insert instrprof_increment() here --------------------------------------------------------------------------------------- -goto - --> # No label. No condition, and not a "return" - -FakeRead(ForLet, _3) # NOTE: Unused result -StorageDead(_4) -_0 = () -StorageDead(_3) -StorageDead(_2) -StorageDead(_1) --------------------------------------------------------------------------------------- -goto - --> # No label. 
No condition, and not a "return" - -return # from main() - - -================================================= -With the call to increment(): - --------------------------------------------------------------------------------------- -switchInt(_4) # From above - --> otherwise # "NOT false" # UNCHANGED - -StorageLive(_15) # CHANGED! Allocated new storage (_15) for the result of match, if true. -_15 = const true # UNCHANGED except _3 is now _15 -FakeRead(ForLet, _15) # CHANGED! Assign value to temporary (to be assigned to _3 later) ... Do I need to do this? - -> # ALL NEW AND NECESSARY TO CALL instrprof_increment() -> StorageLive(_16) # pointer to instrprof_increment() function ? -> StorageLive(_17) -> StorageLive(_18) -> StorageLive(_19) -> _19 = const {alloc1+0: &&[u8; 6]} -> _18 = &raw const (*(*_19)) -> _17 = move _18 as *const u8 (Pointer(ArrayToPointer)) -> StorageDead(_18) -> StorageLive(_20) -> _20 = const 1234u64 -> StorageLive(_21) -> _21 = const 3u32 -> StorageLive(_22) -> _22 = const 1u32 -> -------------------------------------------------------------------------------------- -> _16 = const std::intrinsics::instrprof_increment(move _17, move _20, move _21, move _22) -> -> -> return -> -> StorageDead(_22) -> StorageDead(_21) -> StorageDead(_20) -> StorageDead(_17) -> StorageDead(_19) -> StorageDead(_16) -> _3 = _15 -> StorageDead(_15) - ---------------------------------# UNCHANGED------------------------------------------- -goto # UNCHANGED - --> # UNCHANGED - -FakeRead(ForLet, _3) # UNCHANGED -StorageDead(_4) # UNCHANGED -_0 = () # UNCHANGED -StorageDead(_3) # UNCHANGED -StorageDead(_2) # UNCHANGED -StorageDead(_1) # UNCHANGED --------------------------------------------------------------------------------------- -goto # UNCHANGED - --> # UNCHANGED - -return # from main() # UNCHANGED - -================================================= -As before, can I skip the extra variable (_15) and insert the call to intrinsic with _3 directly?: - - --------------------------------------------------------------------------------------- -switchInt(_4) # From above - --> otherwise # "NOT false" # UNCHANGED - -_3 = const true # UNCHANGED? - -> # ALL NEW AND NECESSARY TO CALL instrprof_increment() -> StorageLive(_16) # pointer to instrprof_increment() function ? 
-> StorageLive(_17) -> StorageLive(_18) -> StorageLive(_19) -> _19 = const {alloc1+0: &&[u8; 6]} -> _18 = &raw const (*(*_19)) -> _17 = move _18 as *const u8 (Pointer(ArrayToPointer)) -> StorageDead(_18) -> StorageLive(_20) -> _20 = const 1234u64 -> StorageLive(_21) -> _21 = const 3u32 -> StorageLive(_22) -> _22 = const 1u32 -> -------------------------------------------------------------------------------------- -> _16 = const std::intrinsics::instrprof_increment(move _17, move _20, move _21, move _22) -> -> -> return -> -> StorageDead(_22) -> StorageDead(_21) -> StorageDead(_20) -> StorageDead(_17) -> StorageDead(_19) -> StorageDead(_16) - ---------------------------------# UNCHANGED------------------------------------------- -goto # UNCHANGED - --> # UNCHANGED - -FakeRead(ForLet, _3) # UNCHANGED -StorageDead(_4) # UNCHANGED -_0 = () # UNCHANGED -StorageDead(_3) # UNCHANGED -StorageDead(_2) # UNCHANGED -StorageDead(_1) # UNCHANGED --------------------------------------------------------------------------------------- -goto # UNCHANGED - --> # UNCHANGED - -return # from main() # UNCHANGED - -*/ \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/match_with_increment_alt.rs b/src/test/codegen/coverage-experiments/src/match_with_increment_alt.rs deleted file mode 100644 index 60586967920cb..0000000000000 --- a/src/test/codegen/coverage-experiments/src/match_with_increment_alt.rs +++ /dev/null @@ -1,296 +0,0 @@ -#![feature(core_intrinsics)] -//static TEST_FUNC_NAME: &'static [u8; 7] = b"main()\0"; - static TEST_FUNC_NAME: &'static [u8; 6] = b"main()"; -fn main() { - unsafe { core::intrinsics::instrprof_increment(TEST_FUNC_NAME as *const u8, 1234 as u64, 3 as u32, 0 as u32) }; - let a = 1; - let b = 10; - let _result = match a < b { - true => { - unsafe { core::intrinsics::instrprof_increment(TEST_FUNC_NAME as *const u8, 1234 as u64, 3 as u32, 1 as u32) }; - true - } - _ => false, - }; -} - -/* - -ALTERNATE APPROACH: - - IS IT MUCH EASIER TO INSERT COUNTERS AT THE TOP OF A REGION THAT MUST EXECUTE IN ENTIRETY IF - PANIC DOES NOT OCCUR? AND WHAT IF WE ADD SUPPORT FOR PANIC UNWIND (later)? - - IS THERE A DETRACTOR COMPARED TO THE DEFERRED APPROACH WHEN CONSIDERING EXPRESSIONS MAY HAVE EARLY RETURNS? - - (BECAUSE, WE STILL NEED TO COUNT THE REGION LEADING UP TO THE EXPRESSION ANYWAY) - -================================================= -================================================= - -To inject an intrinsic after computing a final expression value of a coverage region: - -Replace the following basic block end (last statement plus terminator): - -... ... 
-StorageLive(_4) -StorageLive(_5) -_5 = _1 -StorageLive(_6) -_6 = _2 -_4 = Lt(move _5, move _6) -StorageDead(_6) -StorageDead(_5) - <------ to insert instrprof_increment() here -FakeRead(ForMatchedPlace, _4) --------------------------------------------------------------------------------------- -switchInt(_4) - - -================================================= -Insert call to intrinsic with: - -StorageLive(_4) # _4 is now meant for deferred FakeRead(ForMatchdPlace, _4) in BasicBlock after increment() call -StorageLive(_5) # Unchanged except _4 is now _5 -StorageLive(_6) # Unchanged except _5 is now _6 -_6 = _1 # Unchanged except _5 is now _6 -StorageLive(_7) # Unchanged except _6 is now _7 -_7 = _2 # Unchanged except _6 is now _7 -_5 = Lt(move _6, move _7) # Unchanged except _4, _5, _6 is now _5, _6, _7 -StorageDead(_7) # Unchanged except _6 is now _7 -StorageDead(_6) # Unchanged except _5 is now _6 - -FakeRead(ForLet, _5) # CHANGED ForMatchedPlace to ForLet - -> # ALL NEW AND NECESSARY TO CALL instrprof_increment() -> StorageLive(_8) # ?? stores function pointer to instrprof_increment function? -> StorageLive(_9) -> StorageLive(_10) -> StorageLive(_11) -> _11 = const {alloc1+0: &&[u8; 6]} -> _10 = &raw const (*(*_11)) -> _9 = move _10 as *const u8 (Pointer(ArrayToPointer)) -> StorageDead(_10) -> StorageLive(_12) -> _12 = const 1234u64 -> StorageLive(_13) -> _13 = const 3u32 -> StorageLive(_14) -> _14 = const 0u32 -> -------------------------------------------------------------------------------------- -> _8 = const std::intrinsics::instrprof_increment(move _9, move _12, move _13, move _14) -> -> -> return -> -> StorageDead(_14) -> StorageDead(_13) -> StorageDead(_12) -> StorageDead(_9) -> StorageDead(_11) -> StorageDead(_8) - -_4 = _5 # ARE THESE LINES REDUNDANT? CAN I JUST PASS _5 DIRECTLY TO FakeRead()? -StorageDead(_5) # DROP "_t" temp result of `let _t = a < b` - # (NOTE THAT IF SO, I CAN REMOVE _5 altogether, and use _4, which coincidentally makes less changes) - # SEE BELOW - -FakeRead(ForMatchedPlace, _4) # Unchanged --------------------------------------------------------------------------------------- -switchInt(_4) # Unchanged - - -================================================= -Can I skip the extra variable and insert call to intrinsic with: - -StorageLive(_4) # Unchanged -StorageLive(_5) # Unchanged -_5 = _1 # Unchanged -StorageLive(_6) # Unchanged -_6 = _2 # Unchanged -_4 = Lt(move _5, move _6) # Unchanged -StorageDead(_6) # Unchanged -StorageDead(_5) # Unchanged - -> # ALL NEW AND NECESSARY TO CALL instrprof_increment() -> FakeRead(ForLet, _4) # Save the post-increment result in temp "_t" -> StorageLive(_8) # ?? stores function pointer to instrprof_increment function? -> StorageLive(_9) -> StorageLive(_10) -> StorageLive(_11) -> _11 = const {alloc1+0: &&[u8; 6]} -> _10 = &raw const (*(*_11)) -> _9 = move _10 as *const u8 (Pointer(ArrayToPointer)) -> StorageDead(_10) -> StorageLive(_12) -> _12 = const 1234u64 -> StorageLive(_13) -> _13 = const 3u32 -> StorageLive(_14) -> _14 = const 0u32 -> -------------------------------------------------------------------------------------- -> _8 = const std::intrinsics::instrprof_increment(move _9, move _12, move _13, move _14) -> -> -> return -> -> StorageDead(_14) -> StorageDead(_13) -> StorageDead(_12) -> StorageDead(_9) -> StorageDead(_11) -> StorageDead(_8) - -FakeRead(ForMatchedPlace, _4) # Unchanged (PREVIOUSLY USED IN FakeRead(ForLet), is that OK?) 
--------------------------------------------------------------------------------------- -switchInt(_4) # Unchanged - - - - - -================================================= -================================================= - -For the second inserted call to instrprof_increment, without that call we have: - --------------------------------------------------------------------------------------- -switchInt(_4) # From above - --> otherwise # that is, "NOT false" - -_3 = const true - <------ to insert instrprof_increment() here --------------------------------------------------------------------------------------- -goto - --> # No label. No condition, and not a "return" - -FakeRead(ForLet, _3) # NOTE: Unused result -StorageDead(_4) -_0 = () -StorageDead(_3) -StorageDead(_2) -StorageDead(_1) --------------------------------------------------------------------------------------- -goto - --> # No label. No condition, and not a "return" - -return # from main() - - -================================================= -With the call to increment(): - --------------------------------------------------------------------------------------- -switchInt(_4) # From above - --> otherwise # "NOT false" # UNCHANGED - -StorageLive(_15) # CHANGED! Allocated new storage (_15) for the result of match, if true. -_15 = const true # UNCHANGED except _3 is now _15 -FakeRead(ForLet, _15) # CHANGED! Assign value to temporary (to be assigned to _3 later) ... Do I need to do this? - -> # ALL NEW AND NECESSARY TO CALL instrprof_increment() -> StorageLive(_16) # pointer to instrprof_increment() function ? -> StorageLive(_17) -> StorageLive(_18) -> StorageLive(_19) -> _19 = const {alloc1+0: &&[u8; 6]} -> _18 = &raw const (*(*_19)) -> _17 = move _18 as *const u8 (Pointer(ArrayToPointer)) -> StorageDead(_18) -> StorageLive(_20) -> _20 = const 1234u64 -> StorageLive(_21) -> _21 = const 3u32 -> StorageLive(_22) -> _22 = const 1u32 -> -------------------------------------------------------------------------------------- -> _16 = const std::intrinsics::instrprof_increment(move _17, move _20, move _21, move _22) -> -> -> return -> -> StorageDead(_22) -> StorageDead(_21) -> StorageDead(_20) -> StorageDead(_17) -> StorageDead(_19) -> StorageDead(_16) -> _3 = _15 -> StorageDead(_15) - ---------------------------------# UNCHANGED------------------------------------------- -goto # UNCHANGED - --> # UNCHANGED - -FakeRead(ForLet, _3) # UNCHANGED -StorageDead(_4) # UNCHANGED -_0 = () # UNCHANGED -StorageDead(_3) # UNCHANGED -StorageDead(_2) # UNCHANGED -StorageDead(_1) # UNCHANGED --------------------------------------------------------------------------------------- -goto # UNCHANGED - --> # UNCHANGED - -return # from main() # UNCHANGED - -================================================= -As before, can I skip the extra variable (_15) and insert the call to intrinsic with _3 directly?: - - --------------------------------------------------------------------------------------- -switchInt(_4) # From above - --> otherwise # "NOT false" # UNCHANGED - -_3 = const true # UNCHANGED? - -> # ALL NEW AND NECESSARY TO CALL instrprof_increment() -> StorageLive(_16) # pointer to instrprof_increment() function ? 
-> StorageLive(_17) -> StorageLive(_18) -> StorageLive(_19) -> _19 = const {alloc1+0: &&[u8; 6]} -> _18 = &raw const (*(*_19)) -> _17 = move _18 as *const u8 (Pointer(ArrayToPointer)) -> StorageDead(_18) -> StorageLive(_20) -> _20 = const 1234u64 -> StorageLive(_21) -> _21 = const 3u32 -> StorageLive(_22) -> _22 = const 1u32 -> -------------------------------------------------------------------------------------- -> _16 = const std::intrinsics::instrprof_increment(move _17, move _20, move _21, move _22) -> -> -> return -> -> StorageDead(_22) -> StorageDead(_21) -> StorageDead(_20) -> StorageDead(_17) -> StorageDead(_19) -> StorageDead(_16) - ---------------------------------# UNCHANGED------------------------------------------- -goto # UNCHANGED - --> # UNCHANGED - -FakeRead(ForLet, _3) # UNCHANGED -StorageDead(_4) # UNCHANGED -_0 = () # UNCHANGED -StorageDead(_3) # UNCHANGED -StorageDead(_2) # UNCHANGED -StorageDead(_1) # UNCHANGED --------------------------------------------------------------------------------------- -goto # UNCHANGED - --> # UNCHANGED - -return # from main() # UNCHANGED - -*/ \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/match_without_increment.mir b/src/test/codegen/coverage-experiments/src/match_without_increment.mir deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/src/test/codegen/coverage-experiments/src/match_without_increment.rs b/src/test/codegen/coverage-experiments/src/match_without_increment.rs deleted file mode 100644 index fa85833e05434..0000000000000 --- a/src/test/codegen/coverage-experiments/src/match_without_increment.rs +++ /dev/null @@ -1,5 +0,0 @@ -fn main() { - let a = 1; - let b = 10; - let _result = match a < b { true => true, _ => false, }; -} \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/match_without_increment_alt.mir b/src/test/codegen/coverage-experiments/src/match_without_increment_alt.mir deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/src/test/codegen/coverage-experiments/src/question_mark_err_status_handling_with_comments.rs b/src/test/codegen/coverage-experiments/src/question_mark_err_status_handling_with_comments.rs deleted file mode 100644 index 03d11b2c230ca..0000000000000 --- a/src/test/codegen/coverage-experiments/src/question_mark_err_status_handling_with_comments.rs +++ /dev/null @@ -1,24 +0,0 @@ -/* */ #[inline(always)] -/* */ pub fn __incr_cov(_region_loc: &str, /*index: u32,*/ result: T) -> T { -/* */ result -/* */ } -/* */ -/* - */ fn main() { -/* : I */ for countdown in __incr_cov("start", 10..0) { // span is just the while test expression -/* : ┃ */ let _ = countdown; -/* : ┃ */ __incr_cov("top of for", ()); -/* ┃ - */ } -/* - */ } - - -// -Z unpretty=val -- present the input source, unstable (and less-pretty) variants; -// valid types are any of the types for `--pretty`, as well as: -// `expanded`, `expanded,identified`, -// `expanded,hygiene` (with internal representations), -// `everybody_loops` (all function bodies replaced with `loop {}`), -// `hir` (the HIR), `hir,identified`, -// `hir,typed` (HIR with types for each node), -// `hir-tree` (dump the raw HIR), -// `mir` (the MIR), or `mir-cfg` (graphviz formatted MIR) - -// argument to `pretty` must be one of `normal`, `expanded`, `identified`, or `expanded,identified` diff --git a/src/test/codegen/coverage-experiments/src/while.rs b/src/test/codegen/coverage-experiments/src/while.rs deleted file mode 100644 index 3cb185eda544f..0000000000000 --- 
a/src/test/codegen/coverage-experiments/src/while.rs +++ /dev/null @@ -1,23 +0,0 @@ -#[inline(always)] -pub fn __incr_cov(_region_loc: &str, result: T) -> T { - result -} - -fn main() { - let mut countdown = 10; - __incr_cov("block start",()); - while __incr_cov("while test", countdown > 0) { - countdown -= 1; - } - - let mut countdown = 10; - __incr_cov("after first while loop",()); - while __incr_cov("while test", countdown > 0) { - countdown -= 1; - if countdown < 5 { - __incr_cov("top of if countdown < 5",()); - break; - } - countdown -= 2; - } -} \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/while_clean.rs b/src/test/codegen/coverage-experiments/src/while_clean.rs deleted file mode 100644 index e9ed1efc220d4..0000000000000 --- a/src/test/codegen/coverage-experiments/src/while_clean.rs +++ /dev/null @@ -1,6 +0,0 @@ -fn main() { - let mut countdown = 10; - while countdown > 0 { - countdown -= 1; - } -} \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/while_early_return.rs b/src/test/codegen/coverage-experiments/src/while_early_return.rs deleted file mode 100644 index 35709ffba3a04..0000000000000 --- a/src/test/codegen/coverage-experiments/src/while_early_return.rs +++ /dev/null @@ -1,10 +0,0 @@ -fn main() -> u8 { // this will lower to HIR but will not compile: `main` can only return types that implement `std::process::Termination` - let mut countdown = 10; - while countdown > 0 { - if false { - return if countdown > 8 { 1 } else { return 2; }; - } - countdown -= 1; - } - 0 -} \ No newline at end of file diff --git a/src/test/codegen/coverage-experiments/src/while_with_comments.rs b/src/test/codegen/coverage-experiments/src/while_with_comments.rs deleted file mode 100644 index 56417fedf00df..0000000000000 --- a/src/test/codegen/coverage-experiments/src/while_with_comments.rs +++ /dev/null @@ -1,51 +0,0 @@ -/* */ #[inline(always)] -/* */ pub fn __incr_cov(_region_loc: &str, /*index: u32,*/ result: T) -> T { -/* */ result -/* */ } -/* */ -/* - */ fn main() { -/* ┃ */ let mut countdown = 10; -/* ┃ */ __incr_cov("block start",()); // Must increment before repeated while text expression -/* : I */ while __incr_cov("while test", countdown > 0) { // span is just the while test expression -/* : ┃ */ countdown -= 1; -/* : ┃ */ // __incr_cov("while loop",()); // Counter not needed, but span is computed as "while test" minus "block start" -/* : ┃ */ // If while criteria is tested 11 times, and the outer block runs only once, 11-1 = 10 -/* : ┃ */ // REMOVING COUNTER ASSUMES NO EARLY RETURN THOUGH. -/* : ┃ */ // I THINK WE CAN ONLY USE THE COUNTER EXPRESSION UP TO FIRST CONDITIONAL BLOCK, IF ANY (if, match, maybe any loop) -/* ┃ - */ } - - let mut countdown = 10; - __incr_cov("after first while loop",()); - while __incr_cov("while test", countdown > 0) { - countdown -= 1; - // if __incr_cov("top of while loop", countdown < 5) { - if countdown < 5 { // "top of while loop" = counter expression "while test" - "after first while loop" - __incr_cov("top of if countdown < 5",()); - break; - } - countdown -= 2; - // __incr_cov("after if countdown < 5 block", ()); - // "after if countdown < 5 block" = counter expression "top of while loop" - "top of if countdown < 5" - // HOWEVER, WE CAN ONLY REMOVE THE COUNTER AND USE COUNTER EXPRESSION IF WE **KNOW** THAT THE BODY OF THE IF - // WILL **ALWAYS** BREAK (OR RETURN, OR CONTINUE?) - // AND THUS WE TREAT THE STATEMENTS FOLLOWING THE IF BLOCK AS IF THEY WERE AN ELSE BLOCK. 
- // THAT'S A LOT TO ASK. - - // PERHAPS TREAT EARLY RETURNS AS A SPECIAL KIND OF COUNTER AND IF ANY ARE INVOKED BEFORE STATEMENTS AFTER THE BLOCK THAT CONTAINS THEM, - // THEN SUBTRACT THOSE COUNTS FROM THE COUNT BEFORE THE BLOCK (AS WE DO HERE)? (SO ONE SET OF EXPRESSIONS MUST SUM ALL OF THE EARLY - // RETURNS) - } -/* - */ } - - -// -Z unpretty=val -- present the input source, unstable (and less-pretty) variants; -// valid types are any of the types for `--pretty`, as well as: -// `expanded`, `expanded,identified`, -// `expanded,hygiene` (with internal representations), -// `everybody_loops` (all function bodies replaced with `loop {}`), -// `hir` (the HIR), `hir,identified`, -// `hir,typed` (HIR with types for each node), -// `hir-tree` (dump the raw HIR), -// `mir` (the MIR), or `mir-cfg` (graphviz formatted MIR) - -// argument to `pretty` must be one of `normal`, `expanded`, `identified`, or `expanded,identified` From d2cd59a0315809afa58df0196c34b33ee0a8c161 Mon Sep 17 00:00:00 2001 From: Rich Kadel Date: Fri, 5 Jun 2020 09:14:45 -0700 Subject: [PATCH 18/34] Add case for count_code_region() extern lang_item As suggested in PR feedback: https://github.com/rust-lang/rust/pull/73011#discussion_r435728923 This allows count_code_region() to be handled like a normal intrinsic so the InstanceDef::InjectedCode variant is no longer needed. --- src/libcore/intrinsics.rs | 21 ++++++------------- src/librustc_codegen_ssa/mir/block.rs | 5 +---- src/librustc_middle/mir/mono.rs | 1 - src/librustc_middle/ty/instance.rs | 7 ------- src/librustc_middle/ty/mod.rs | 1 - src/librustc_middle/ty/structural_impls.rs | 11 +++------- src/librustc_mir/interpret/terminator.rs | 3 --- src/librustc_mir/monomorphize/collector.rs | 5 +---- src/librustc_mir/monomorphize/partitioning.rs | 2 -- src/librustc_mir/shim.rs | 3 --- src/librustc_passes/weak_lang_items.rs | 16 ++++++++++++-- src/librustc_ty/instance.rs | 4 ---- src/librustc_typeck/check/intrinsic.rs | 2 ++ 13 files changed, 27 insertions(+), 54 deletions(-) diff --git a/src/libcore/intrinsics.rs b/src/libcore/intrinsics.rs index 06a432a26961e..7ce5814d39a02 100644 --- a/src/libcore/intrinsics.rs +++ b/src/libcore/intrinsics.rs @@ -1941,22 +1941,13 @@ extern "rust-intrinsic" { /// /// Perma-unstable: do not use. pub fn miri_start_panic(payload: *mut u8) -> !; -} -/// Defines the `count_code_region` intrinsic as a `LangItem`. `LangItem`s require a function body -/// to register its DefId with the LangItem entry. The function body is never actually called (and -/// is therefore implemented as an aborting stub) because it is replaced with the LLVM intrinsic -/// `llvm.instrprof.increment` by -/// `rustc_codegen_llvm::intrinsic::IntrinsicCallMethods::codegen_intrinsic_call()`. -#[cfg(not(bootstrap))] -#[cfg_attr(not(bootstrap), lang = "count_code_region")] -fn count_code_region(_index: u32) { - // remove `unsafe` (and safety comment) on bootstrap bump - #[cfg_attr(not(bootstrap), allow(unused_unsafe))] - // SAFETY: the `abort` intrinsic has no requirements to be called. - unsafe { - abort() - } + /// Internal placeholder for injecting code coverage counters when the "instrument-coverage" + /// option is enabled. The placeholder is replaced with `llvm.instrprof.increment` during code + /// generation. 
+ #[cfg(not(bootstrap))] + #[cfg_attr(not(bootstrap), lang = "count_code_region")] + pub fn count_code_region(_index: u32); } // Some functions are defined here because they accidentally got made diff --git a/src/librustc_codegen_ssa/mir/block.rs b/src/librustc_codegen_ssa/mir/block.rs index d7db657154993..665ef77090987 100644 --- a/src/librustc_codegen_ssa/mir/block.rs +++ b/src/librustc_codegen_ssa/mir/block.rs @@ -566,10 +566,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { // Handle intrinsics old codegen wants Expr's for, ourselves. let intrinsic = match def { - Some(ty::InstanceDef::Intrinsic(def_id)) - | Some(ty::InstanceDef::InjectedCode(def_id)) => { - Some(bx.tcx().item_name(def_id).as_str()) - } + Some(ty::InstanceDef::Intrinsic(def_id)) => Some(bx.tcx().item_name(def_id).as_str()), _ => None, }; let intrinsic = intrinsic.as_ref().map(|s| &s[..]); diff --git a/src/librustc_middle/mir/mono.rs b/src/librustc_middle/mir/mono.rs index b2c00849d9f83..c889dbc0a4498 100644 --- a/src/librustc_middle/mir/mono.rs +++ b/src/librustc_middle/mir/mono.rs @@ -352,7 +352,6 @@ impl<'tcx> CodegenUnit<'tcx> { InstanceDef::VtableShim(..) | InstanceDef::ReifyShim(..) | InstanceDef::Intrinsic(..) - | InstanceDef::InjectedCode(..) | InstanceDef::FnPtrShim(..) | InstanceDef::Virtual(..) | InstanceDef::ClosureOnceShim { .. } diff --git a/src/librustc_middle/ty/instance.rs b/src/librustc_middle/ty/instance.rs index 4f88e64c5039a..1ce079821a22e 100644 --- a/src/librustc_middle/ty/instance.rs +++ b/src/librustc_middle/ty/instance.rs @@ -21,10 +21,6 @@ pub enum InstanceDef<'tcx> { Item(DefId), Intrinsic(DefId), - /// Injected call to a placeholder function that is replaced with - /// For example: `core::intrinsic::count_code_region()` for code coverage. - InjectedCode(DefId), - /// `::method` where `method` receives unsizeable `self: Self`. VtableShim(DefId), @@ -153,7 +149,6 @@ impl<'tcx> InstanceDef<'tcx> { | InstanceDef::FnPtrShim(def_id, _) | InstanceDef::Virtual(def_id, _) | InstanceDef::Intrinsic(def_id) - | InstanceDef::InjectedCode(def_id) | InstanceDef::ClosureOnceShim { call_once: def_id } | InstanceDef::DropGlue(def_id, _) | InstanceDef::CloneShim(def_id, _) => def_id, @@ -241,7 +236,6 @@ impl<'tcx> fmt::Display for Instance<'tcx> { InstanceDef::VtableShim(_) => write!(f, " - shim(vtable)"), InstanceDef::ReifyShim(_) => write!(f, " - shim(reify)"), InstanceDef::Intrinsic(_) => write!(f, " - intrinsic"), - InstanceDef::InjectedCode(_) => write!(f, " - injected-code"), InstanceDef::Virtual(_, num) => write!(f, " - virtual#{}", num), InstanceDef::FnPtrShim(_, ty) => write!(f, " - shim({:?})", ty), InstanceDef::ClosureOnceShim { .. } => write!(f, " - shim"), @@ -421,7 +415,6 @@ impl<'tcx> Instance<'tcx> { | InstanceDef::FnPtrShim(..) | InstanceDef::Item(_) | InstanceDef::Intrinsic(..) - | InstanceDef::InjectedCode(..) | InstanceDef::ReifyShim(..) | InstanceDef::Virtual(..) | InstanceDef::VtableShim(..) => Some(self.substs), diff --git a/src/librustc_middle/ty/mod.rs b/src/librustc_middle/ty/mod.rs index 9b1e717731e82..93ef73171993c 100644 --- a/src/librustc_middle/ty/mod.rs +++ b/src/librustc_middle/ty/mod.rs @@ -2717,7 +2717,6 @@ impl<'tcx> TyCtxt<'tcx> { ty::InstanceDef::VtableShim(..) | ty::InstanceDef::ReifyShim(..) | ty::InstanceDef::Intrinsic(..) - | ty::InstanceDef::InjectedCode(..) | ty::InstanceDef::FnPtrShim(..) | ty::InstanceDef::Virtual(..) | ty::InstanceDef::ClosureOnceShim { .. 
} diff --git a/src/librustc_middle/ty/structural_impls.rs b/src/librustc_middle/ty/structural_impls.rs index b6cbd2082a518..f6f5dfd651612 100644 --- a/src/librustc_middle/ty/structural_impls.rs +++ b/src/librustc_middle/ty/structural_impls.rs @@ -674,7 +674,6 @@ impl<'a, 'tcx> Lift<'tcx> for ty::InstanceDef<'a> { ty::InstanceDef::VtableShim(def_id) => Some(ty::InstanceDef::VtableShim(def_id)), ty::InstanceDef::ReifyShim(def_id) => Some(ty::InstanceDef::ReifyShim(def_id)), ty::InstanceDef::Intrinsic(def_id) => Some(ty::InstanceDef::Intrinsic(def_id)), - ty::InstanceDef::InjectedCode(def_id) => Some(ty::InstanceDef::Intrinsic(def_id)), ty::InstanceDef::FnPtrShim(def_id, ref ty) => { Some(ty::InstanceDef::FnPtrShim(def_id, tcx.lift(ty)?)) } @@ -847,7 +846,6 @@ impl<'tcx> TypeFoldable<'tcx> for ty::instance::Instance<'tcx> { VtableShim(did) => VtableShim(did.fold_with(folder)), ReifyShim(did) => ReifyShim(did.fold_with(folder)), Intrinsic(did) => Intrinsic(did.fold_with(folder)), - InjectedCode(did) => InjectedCode(did.fold_with(folder)), FnPtrShim(did, ty) => FnPtrShim(did.fold_with(folder), ty.fold_with(folder)), Virtual(did, i) => Virtual(did.fold_with(folder), i), ClosureOnceShim { call_once } => { @@ -863,12 +861,9 @@ impl<'tcx> TypeFoldable<'tcx> for ty::instance::Instance<'tcx> { use crate::ty::InstanceDef::*; self.substs.visit_with(visitor) || match self.def { - Item(did) - | VtableShim(did) - | ReifyShim(did) - | Intrinsic(did) - | InjectedCode(did) - | Virtual(did, _) => did.visit_with(visitor), + Item(did) | VtableShim(did) | ReifyShim(did) | Intrinsic(did) | Virtual(did, _) => { + did.visit_with(visitor) + } FnPtrShim(did, ty) | CloneShim(did, ty) => { did.visit_with(visitor) || ty.visit_with(visitor) } diff --git a/src/librustc_mir/interpret/terminator.rs b/src/librustc_mir/interpret/terminator.rs index 82fa471b54d73..cd7621ea9752b 100644 --- a/src/librustc_mir/interpret/terminator.rs +++ b/src/librustc_mir/interpret/terminator.rs @@ -257,9 +257,6 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { assert!(caller_abi == Abi::RustIntrinsic || caller_abi == Abi::PlatformIntrinsic); M::call_intrinsic(self, instance, args, ret, unwind) } - ty::InstanceDef::InjectedCode(..) => { - M::call_intrinsic(self, instance, args, ret, unwind) - } ty::InstanceDef::VtableShim(..) | ty::InstanceDef::ReifyShim(..) | ty::InstanceDef::ClosureOnceShim { .. } diff --git a/src/librustc_mir/monomorphize/collector.rs b/src/librustc_mir/monomorphize/collector.rs index 24c4226bb4e94..994d1e69f2e3e 100644 --- a/src/librustc_mir/monomorphize/collector.rs +++ b/src/librustc_mir/monomorphize/collector.rs @@ -714,9 +714,7 @@ fn visit_instance_use<'tcx>( } match instance.def { - ty::InstanceDef::Virtual(..) - | ty::InstanceDef::Intrinsic(_) - | ty::InstanceDef::InjectedCode(_) => { + ty::InstanceDef::Virtual(..) | ty::InstanceDef::Intrinsic(_) => { if !is_direct_call { bug!("{:?} being reified", instance); } @@ -753,7 +751,6 @@ fn should_monomorphize_locally<'tcx>(tcx: TyCtxt<'tcx>, instance: &Instance<'tcx | ty::InstanceDef::FnPtrShim(..) | ty::InstanceDef::DropGlue(..) | ty::InstanceDef::Intrinsic(_) - | ty::InstanceDef::InjectedCode(_) | ty::InstanceDef::CloneShim(..) 
=> return true, }; diff --git a/src/librustc_mir/monomorphize/partitioning.rs b/src/librustc_mir/monomorphize/partitioning.rs index 7c97b9d611e15..db1ea72c0a531 100644 --- a/src/librustc_mir/monomorphize/partitioning.rs +++ b/src/librustc_mir/monomorphize/partitioning.rs @@ -322,7 +322,6 @@ fn mono_item_visibility( | InstanceDef::FnPtrShim(..) | InstanceDef::Virtual(..) | InstanceDef::Intrinsic(..) - | InstanceDef::InjectedCode(..) | InstanceDef::ClosureOnceShim { .. } | InstanceDef::DropGlue(..) | InstanceDef::CloneShim(..) => return Visibility::Hidden, @@ -718,7 +717,6 @@ fn characteristic_def_id_of_mono_item<'tcx>( | ty::InstanceDef::FnPtrShim(..) | ty::InstanceDef::ClosureOnceShim { .. } | ty::InstanceDef::Intrinsic(..) - | ty::InstanceDef::InjectedCode(..) | ty::InstanceDef::DropGlue(..) | ty::InstanceDef::Virtual(..) | ty::InstanceDef::CloneShim(..) => return None, diff --git a/src/librustc_mir/shim.rs b/src/librustc_mir/shim.rs index b4477d9c86d43..f95fd9b9e90c5 100644 --- a/src/librustc_mir/shim.rs +++ b/src/librustc_mir/shim.rs @@ -109,9 +109,6 @@ fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceDef<'tcx>) -> Body<' ty::InstanceDef::Intrinsic(_) => { bug!("creating shims from intrinsics ({:?}) is unsupported", instance) } - ty::InstanceDef::InjectedCode(_) => { - bug!("creating shims from injected code ({:?}) is unsupported", instance) - } }; debug!("make_shim({:?}) = untransformed {:?}", instance, result); diff --git a/src/librustc_passes/weak_lang_items.rs b/src/librustc_passes/weak_lang_items.rs index 96ec23692df51..f2f07b5d4fb26 100644 --- a/src/librustc_passes/weak_lang_items.rs +++ b/src/librustc_passes/weak_lang_items.rs @@ -5,10 +5,12 @@ use rustc_errors::struct_span_err; use rustc_hir as hir; use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor}; use rustc_hir::lang_items; +use rustc_hir::lang_items::ITEM_REFS; use rustc_hir::weak_lang_items::WEAK_ITEMS_REFS; use rustc_middle::middle::lang_items::whitelisted; use rustc_middle::ty::TyCtxt; use rustc_session::config::CrateType; +use rustc_span::symbol::sym; use rustc_span::symbol::Symbol; use rustc_span::Span; @@ -70,11 +72,21 @@ fn verify<'tcx>(tcx: TyCtxt<'tcx>, items: &lang_items::LanguageItems) { } impl<'a, 'tcx> Context<'a, 'tcx> { - fn register(&mut self, name: Symbol, span: Span) { + fn register(&mut self, name: Symbol, span: Span, hir_id: hir::HirId) { if let Some(&item) = WEAK_ITEMS_REFS.get(&name) { if self.items.require(item).is_err() { self.items.missing.push(item); } + } else if name == sym::count_code_region { + // `core::intrinsics::code_count_region()` is (currently) the only `extern` lang item + // that is never actually linked. It is not a `weak_lang_item` that can be registered + // when used, and should be registered here instead. 
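            // (Put differently, and assuming the surrounding patch: this records the
            // `DefId` of the `count_code_region` lang item so that lookups such as
            // `tcx.lang_items().count_code_region_fn()` -- used by the
            // `InstrumentCoverage` MIR pass -- can resolve it, even though nothing
            // ever links against the item itself.)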
+ if let Some((item_index, _)) = ITEM_REFS.get(&*name.as_str()).cloned() { + if self.items.items[item_index].is_none() { + let item_def_id = self.tcx.hir().local_def_id(hir_id).to_def_id(); + self.items.items[item_index] = Some(item_def_id); + } + } } else { struct_span_err!(self.tcx.sess, span, E0264, "unknown external lang item: `{}`", name) .emit(); @@ -91,7 +103,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> { fn visit_foreign_item(&mut self, i: &hir::ForeignItem<'_>) { if let Some((lang_item, _)) = hir::lang_items::extract(&i.attrs) { - self.register(lang_item, i.span); + self.register(lang_item, i.span, i.hir_id); } intravisit::walk_foreign_item(self, i) } diff --git a/src/librustc_ty/instance.rs b/src/librustc_ty/instance.rs index d4ceeff324450..0acf769168137 100644 --- a/src/librustc_ty/instance.rs +++ b/src/librustc_ty/instance.rs @@ -35,10 +35,6 @@ fn resolve_instance<'tcx>( debug!(" => intrinsic"); ty::InstanceDef::Intrinsic(def_id) } - ty::FnDef(def_id, _) if Some(def_id) == tcx.lang_items().count_code_region_fn() => { - debug!(" => injected placeholder function to be replaced"); - ty::InstanceDef::InjectedCode(def_id) - } ty::FnDef(def_id, substs) if Some(def_id) == tcx.lang_items().drop_in_place_fn() => { let ty = substs.type_at(0); diff --git a/src/librustc_typeck/check/intrinsic.rs b/src/librustc_typeck/check/intrinsic.rs index bded2c695c9db..3ec6973a17d56 100644 --- a/src/librustc_typeck/check/intrinsic.rs +++ b/src/librustc_typeck/check/intrinsic.rs @@ -347,6 +347,8 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) { return; } + "count_code_region" => (0, vec![tcx.types.u32], tcx.mk_unit()), + ref other => { struct_span_err!( tcx.sess, From e4df7e70466611a49d3ff6c49d162b2045173449 Mon Sep 17 00:00:00 2001 From: Rich Kadel Date: Fri, 5 Jun 2020 09:49:31 -0700 Subject: [PATCH 19/34] Update src/libcore/intrinsics.rs Co-authored-by: bjorn3 --- src/libcore/intrinsics.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libcore/intrinsics.rs b/src/libcore/intrinsics.rs index 7ce5814d39a02..3806d3ae25487 100644 --- a/src/libcore/intrinsics.rs +++ b/src/libcore/intrinsics.rs @@ -1946,7 +1946,7 @@ extern "rust-intrinsic" { /// option is enabled. The placeholder is replaced with `llvm.instrprof.increment` during code /// generation. 
#[cfg(not(bootstrap))] - #[cfg_attr(not(bootstrap), lang = "count_code_region")] + #[lang = "count_code_region"] pub fn count_code_region(_index: u32); } From 7e49a9ec59f7950efa9950b65c10f9b3f3a4b6b2 Mon Sep 17 00:00:00 2001 From: Rich Kadel Date: Sun, 7 Jun 2020 19:35:15 -0700 Subject: [PATCH 20/34] moved to post_borrowck_cleanup & used MirPatch --- src/librustc_mir/interpret/intrinsics.rs | 1 + .../transform/instrument_coverage.rs | 112 +++++++++--------- src/librustc_mir/transform/mod.rs | 6 +- src/librustc_session/options.rs | 4 +- 4 files changed, 65 insertions(+), 58 deletions(-) diff --git a/src/librustc_mir/interpret/intrinsics.rs b/src/librustc_mir/interpret/intrinsics.rs index 47e5b8b4fcec4..4d8120794f885 100644 --- a/src/librustc_mir/interpret/intrinsics.rs +++ b/src/librustc_mir/interpret/intrinsics.rs @@ -389,6 +389,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { ); self.copy_op(self.operand_index(args[0], index)?, dest)?; } + sym::count_code_region => (), _ => return Ok(false), } diff --git a/src/librustc_mir/transform/instrument_coverage.rs b/src/librustc_mir/transform/instrument_coverage.rs index 045cd03d1f7da..0604caadaea38 100644 --- a/src/librustc_mir/transform/instrument_coverage.rs +++ b/src/librustc_mir/transform/instrument_coverage.rs @@ -1,8 +1,7 @@ use crate::transform::{MirPass, MirSource}; -use rustc_index::vec::Idx; +use crate::util::patch::MirPatch; use rustc_middle::mir::interpret::Scalar; use rustc_middle::mir::*; -use rustc_middle::mir::{Local, LocalDecl}; use rustc_middle::ty; use rustc_middle::ty::Ty; use rustc_middle::ty::TyCtxt; @@ -16,69 +15,62 @@ pub struct InstrumentCoverage; * the intrinsic llvm.instrprof.increment. */ -// FIXME(richkadel): As a first step, counters are only injected at the top of each function. -// The complete solution will inject counters at each conditional code branch. - impl<'tcx> MirPass<'tcx> for InstrumentCoverage { fn run_pass(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, body: &mut Body<'tcx>) { if tcx.sess.opts.debugging_opts.instrument_coverage { - if let Some(callee_fn_def_id) = tcx.lang_items().count_code_region_fn() { - debug!("instrumenting {:?}", src.def_id()); - instrument_coverage(tcx, callee_fn_def_id, body); - } + debug!("instrumenting {:?}", src.def_id()); + instrument_coverage(tcx, body); } } } -pub fn instrument_coverage<'tcx>( - tcx: TyCtxt<'tcx>, - callee_fn_def_id: DefId, - body: &mut Body<'tcx>, -) { +// The first counter (start of the function) is index zero. +const INIT_FUNCTION_COUNTER: u128 = 0; + +/// Injects calls to placeholder function `count_code_region()`. +// FIXME(richkadel): As a first step, counters are only injected at the top of each function. +// The complete solution will inject counters at each conditional code branch. 
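// A rough sketch of the intended result of this pass (block and local numbering is
// illustrative only, not literal compiler output): the counter block is appended,
// patched, and then swapped into the entry position, so the transformed MIR looks
// approximately like:
//
//     bb0: {                                    // new entry block (the counter)
//         StorageLive(_tmp);                    // fresh unit temporary
//         _tmp = const count_code_region(const 0_u32) -> bbN;
//     }
//     bbN: {                                    // the original entry block, moved
//         StorageDead(_tmp);
//         ... original statements and terminator ...
//     }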
+pub fn instrument_coverage<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { let span = body.span.shrink_to_lo(); - let ret_ty = tcx.fn_sig(callee_fn_def_id).output(); + let count_code_region_fn = + function_handle(tcx, span, tcx.lang_items().count_code_region_fn().unwrap()); + let counter_index = const_int_operand(tcx, span, tcx.types.u32, INIT_FUNCTION_COUNTER); + + let mut patch = MirPatch::new(body); + + let new_block = patch.new_block(placeholder_block(SourceInfo::outermost(body.span))); + let next_block = START_BLOCK; + + let temp = patch.new_temp(tcx.mk_unit(), body.span); + patch.patch_terminator( + new_block, + TerminatorKind::Call { + func: count_code_region_fn, + args: vec![counter_index], + // new_block will swapped with the next_block, after applying patch + destination: Some((Place::from(temp), new_block)), + cleanup: None, + from_hir_call: false, + }, + ); + + patch.add_statement(new_block.start_location(), StatementKind::StorageLive(temp)); + patch.add_statement(next_block.start_location(), StatementKind::StorageDead(temp)); + + patch.apply(body); + + // To insert the `new_block` in front of the first block in the counted branch (for example, + // the START_BLOCK, at the top of the function), just swap the indexes, leaving the rest of the + // graph unchanged. + body.basic_blocks_mut().swap(next_block, new_block); +} + +fn function_handle<'tcx>(tcx: TyCtxt<'tcx>, span: Span, fn_def_id: DefId) -> Operand<'tcx> { + let ret_ty = tcx.fn_sig(fn_def_id).output(); let ret_ty = ret_ty.no_bound_vars().unwrap(); let substs = tcx.mk_substs(::std::iter::once(ty::subst::GenericArg::from(ret_ty))); - - let count_code_region_fn: Operand<'_> = - Operand::function_handle(tcx, callee_fn_def_id, substs, span); - - let index = const_int_operand(tcx, span.clone(), tcx.types.u32, 0); - - let args = vec![index]; - - let source_info = SourceInfo { span: span, scope: OUTERMOST_SOURCE_SCOPE }; - - let new_block = START_BLOCK + body.basic_blocks().len(); - - let next_local = body.local_decls.len(); - let new_temp = Local::new(next_local); - let unit_temp = Place::from(new_temp); - - let storage_live = Statement { source_info, kind: StatementKind::StorageLive(new_temp) }; - let storage_dead = Statement { source_info, kind: StatementKind::StorageDead(new_temp) }; - - let count_code_region_call = TerminatorKind::Call { - func: count_code_region_fn, - args, - destination: Some((unit_temp, new_block)), - cleanup: None, - from_hir_call: false, - }; - - body.local_decls.push(LocalDecl::new(tcx.mk_unit(), body.span)); - body.basic_blocks_mut().push(BasicBlockData { - statements: vec![storage_live], - is_cleanup: false, - terminator: Some(Terminator { source_info, kind: count_code_region_call }), - }); - - body.basic_blocks_mut().swap(START_BLOCK, new_block); - body[new_block].statements.push(storage_dead); - - // FIXME(richkadel): ALSO add each computed Span for each conditional branch to the coverage map - // and provide that map to LLVM to encode in the final binary. 
+ Operand::function_handle(tcx, fn_def_id, substs, span) } fn const_int_operand<'tcx>( @@ -98,3 +90,15 @@ fn const_int_operand<'tcx>( literal: ty::Const::from_scalar(tcx, Scalar::from_uint(val, size), ty), }) } + +fn placeholder_block<'tcx>(source_info: SourceInfo) -> BasicBlockData<'tcx> { + BasicBlockData { + statements: vec![], + terminator: Some(Terminator { + source_info, + // this gets overwritten by the counter Call + kind: TerminatorKind::Unreachable, + }), + is_cleanup: false, + } +} diff --git a/src/librustc_mir/transform/mod.rs b/src/librustc_mir/transform/mod.rs index e03ef48f74838..956ddd2051bac 100644 --- a/src/librustc_mir/transform/mod.rs +++ b/src/librustc_mir/transform/mod.rs @@ -288,8 +288,6 @@ fn mir_validated( &[&[ // What we need to run borrowck etc. &promote_pass, - // FIXME(richkadel): is this the best place for the InstrumentCoverage pass? - &instrument_coverage::InstrumentCoverage, &simplify::SimplifyCfg::new("qualify-consts"), ]], ); @@ -340,6 +338,10 @@ fn run_post_borrowck_cleanup_passes<'tcx>( // `AddRetag` needs to run after `ElaborateDrops`. Otherwise it should run fairly late, // but before optimizations begin. &add_retag::AddRetag, + // If the `instrument-coverage` option is enabled, analyze the CFG, identify each + // conditional branch, construct a coverage map to be passed to LLVM, and inject counters + // where needed. + &instrument_coverage::InstrumentCoverage, &simplify::SimplifyCfg::new("elaborate-drops"), ]; diff --git a/src/librustc_session/options.rs b/src/librustc_session/options.rs index 599ce595e1314..2d231359057fd 100644 --- a/src/librustc_session/options.rs +++ b/src/librustc_session/options.rs @@ -877,8 +877,8 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options, (such as entering an empty infinite loop) by inserting llvm.sideeffect \ (default: no)"), instrument_coverage: bool = (false, parse_bool, [TRACKED], - "instrument the generated code with LLVM code region counters for \ - generating coverage reports (default: no)"), + "instrument the generated code with LLVM code region counters to \ + (in the future) generate coverage reports (experimental; default: no)"), instrument_mcount: bool = (false, parse_bool, [TRACKED], "insert function instrument code for mcount-based tracing (default: no)"), keep_hygiene_data: bool = (false, parse_bool, [UNTRACKED], From 46ebd57c42439b3aedcb160f70b022a4f59f4afa Mon Sep 17 00:00:00 2001 From: Rich Kadel Date: Mon, 8 Jun 2020 16:20:26 -0700 Subject: [PATCH 21/34] moved instrument_coverage pass, optimized scalar, added FIXME --- src/librustc_codegen_llvm/intrinsic.rs | 5 ++++ .../transform/instrument_coverage.rs | 27 ++++++++++++------- src/librustc_mir/transform/mod.rs | 8 +++--- 3 files changed, 27 insertions(+), 13 deletions(-) diff --git a/src/librustc_codegen_llvm/intrinsic.rs b/src/librustc_codegen_llvm/intrinsic.rs index 7fddda99185b4..95465939070a0 100644 --- a/src/librustc_codegen_llvm/intrinsic.rs +++ b/src/librustc_codegen_llvm/intrinsic.rs @@ -148,6 +148,11 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> { caller_fn_path ); + // FIXME(richkadel): (1) Replace raw function name with mangled function name; + // (2) Replace hardcoded `1234` in `hash` with a computed hash (as discussed in) + // the MCP (compiler-team/issues/278); and replace the hardcoded `1` for + // `num_counters` with the actual number of counters per function (when the + // changes are made to inject more than one counter per function). 
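            // For reference, the LLVM intrinsic ultimately emitted here is declared in
            // the LLVM LangRef roughly as:
            //
            //     declare void @llvm.instrprof.increment(i8* <name>, i64 <hash>,
            //                                            i32 <num-counters>, i32 <index>)
            //
            // so the `fn_name`, `hash`, `num_counters`, and `index` values built below
            // map to those four operands; only `index` currently comes from the
            // injected `count_code_region()` call (via `args[0]` below).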
let (fn_name, _len_val) = self.const_str(Symbol::intern(&caller_fn_path)); let index = args[0].immediate(); let hash = self.const_u64(1234); diff --git a/src/librustc_mir/transform/instrument_coverage.rs b/src/librustc_mir/transform/instrument_coverage.rs index 0604caadaea38..27abe813b067d 100644 --- a/src/librustc_mir/transform/instrument_coverage.rs +++ b/src/librustc_mir/transform/instrument_coverage.rs @@ -7,6 +7,7 @@ use rustc_middle::ty::Ty; use rustc_middle::ty::TyCtxt; use rustc_span::def_id::DefId; use rustc_span::Span; +use rustc_target::abi; pub struct InstrumentCoverage; @@ -25,7 +26,7 @@ impl<'tcx> MirPass<'tcx> for InstrumentCoverage { } // The first counter (start of the function) is index zero. -const INIT_FUNCTION_COUNTER: u128 = 0; +const INIT_FUNCTION_COUNTER: u32 = 0; /// Injects calls to placeholder function `count_code_region()`. // FIXME(richkadel): As a first step, counters are only injected at the top of each function. @@ -35,7 +36,8 @@ pub fn instrument_coverage<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { let count_code_region_fn = function_handle(tcx, span, tcx.lang_items().count_code_region_fn().unwrap()); - let counter_index = const_int_operand(tcx, span, tcx.types.u32, INIT_FUNCTION_COUNTER); + let counter_index = + const_int_operand(tcx, span, tcx.types.u32, Scalar::from_u32(INIT_FUNCTION_COUNTER)); let mut patch = MirPatch::new(body); @@ -77,17 +79,24 @@ fn const_int_operand<'tcx>( tcx: TyCtxt<'tcx>, span: Span, ty: Ty<'tcx>, - val: u128, + val: Scalar, ) -> Operand<'tcx> { - let param_env_and_ty = ty::ParamEnv::empty().and(ty); - let size = tcx - .layout_of(param_env_and_ty) - .unwrap_or_else(|e| panic!("could not compute layout for {:?}: {:?}", ty, e)) - .size; + debug_assert!({ + let param_env_and_ty = ty::ParamEnv::empty().and(ty); + let type_size = tcx + .layout_of(param_env_and_ty) + .unwrap_or_else(|e| panic!("could not compute layout for {:?}: {:?}", ty, e)) + .size; + let scalar_size = abi::Size::from_bytes(match val { + Scalar::Raw { size, .. } => size, + _ => panic!("Invalid scalar type {:?}", val), + }); + scalar_size == type_size + }); Operand::Constant(box Constant { span, user_ty: None, - literal: ty::Const::from_scalar(tcx, Scalar::from_uint(val, size), ty), + literal: ty::Const::from_scalar(tcx, val, ty), }) } diff --git a/src/librustc_mir/transform/mod.rs b/src/librustc_mir/transform/mod.rs index 956ddd2051bac..846ed1f86d8d6 100644 --- a/src/librustc_mir/transform/mod.rs +++ b/src/librustc_mir/transform/mod.rs @@ -289,6 +289,10 @@ fn mir_validated( // What we need to run borrowck etc. &promote_pass, &simplify::SimplifyCfg::new("qualify-consts"), + // If the `instrument-coverage` option is enabled, analyze the CFG, identify each + // conditional branch, construct a coverage map to be passed to LLVM, and inject counters + // where needed. + &instrument_coverage::InstrumentCoverage, ]], ); @@ -338,10 +342,6 @@ fn run_post_borrowck_cleanup_passes<'tcx>( // `AddRetag` needs to run after `ElaborateDrops`. Otherwise it should run fairly late, // but before optimizations begin. &add_retag::AddRetag, - // If the `instrument-coverage` option is enabled, analyze the CFG, identify each - // conditional branch, construct a coverage map to be passed to LLVM, and inject counters - // where needed. 
- &instrument_coverage::InstrumentCoverage, &simplify::SimplifyCfg::new("elaborate-drops"), ]; From 20aba8f634c13fa2bb1b043b51a074769dc06f66 Mon Sep 17 00:00:00 2001 From: Rich Kadel Date: Wed, 10 Jun 2020 09:54:02 -0700 Subject: [PATCH 22/34] added test, Operand::const_from_scalar, require_lang_item, & comments Addresses feedback from @oli-obk (Thanks!) --- src/librustc_middle/mir/mod.rs | 28 +++++++ src/librustc_mir/interpret/intrinsics.rs | 1 + .../transform/instrument_coverage.rs | 52 ++++-------- src/test/mir-opt/instrument_coverage.rs | 19 +++++ .../rustc.bar.InstrumentCoverage.diff | 41 ++++++++++ .../rustc.main.InstrumentCoverage.diff | 82 +++++++++++++++++++ 6 files changed, 186 insertions(+), 37 deletions(-) create mode 100644 src/test/mir-opt/instrument_coverage.rs create mode 100644 src/test/mir-opt/instrument_coverage/rustc.bar.InstrumentCoverage.diff create mode 100644 src/test/mir-opt/instrument_coverage/rustc.main.InstrumentCoverage.diff diff --git a/src/librustc_middle/mir/mod.rs b/src/librustc_middle/mir/mod.rs index 27848684706d6..11ae2cf72c462 100644 --- a/src/librustc_middle/mir/mod.rs +++ b/src/librustc_middle/mir/mod.rs @@ -29,6 +29,7 @@ use rustc_macros::HashStable; use rustc_serialize::{Decodable, Encodable}; use rustc_span::symbol::Symbol; use rustc_span::{Span, DUMMY_SP}; +use rustc_target::abi; use rustc_target::asm::InlineAsmRegOrRegClass; use std::borrow::Cow; use std::fmt::{self, Debug, Display, Formatter, Write}; @@ -2218,6 +2219,33 @@ impl<'tcx> Operand<'tcx> { }) } + /// Convenience helper to make a literal-like constant from a given scalar value. + /// Since this is used to synthesize MIR, assumes `user_ty` is None. + pub fn const_from_scalar( + tcx: TyCtxt<'tcx>, + ty: Ty<'tcx>, + val: Scalar, + span: Span, + ) -> Operand<'tcx> { + debug_assert!({ + let param_env_and_ty = ty::ParamEnv::empty().and(ty); + let type_size = tcx + .layout_of(param_env_and_ty) + .unwrap_or_else(|e| panic!("could not compute layout for {:?}: {:?}", ty, e)) + .size; + let scalar_size = abi::Size::from_bytes(match val { + Scalar::Raw { size, .. 
} => size, + _ => panic!("Invalid scalar type {:?}", val), + }); + scalar_size == type_size + }); + Operand::Constant(box Constant { + span, + user_ty: None, + literal: ty::Const::from_scalar(tcx, val, ty), + }) + } + pub fn to_copy(&self) -> Self { match *self { Operand::Copy(_) | Operand::Constant(_) => self.clone(), diff --git a/src/librustc_mir/interpret/intrinsics.rs b/src/librustc_mir/interpret/intrinsics.rs index 4d8120794f885..ac28ccd181520 100644 --- a/src/librustc_mir/interpret/intrinsics.rs +++ b/src/librustc_mir/interpret/intrinsics.rs @@ -389,6 +389,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { ); self.copy_op(self.operand_index(args[0], index)?, dest)?; } + // FIXME(#73156): Handle source code coverage in const eval sym::count_code_region => (), _ => return Ok(false), } diff --git a/src/librustc_mir/transform/instrument_coverage.rs b/src/librustc_mir/transform/instrument_coverage.rs index 27abe813b067d..fda7ad731fa27 100644 --- a/src/librustc_mir/transform/instrument_coverage.rs +++ b/src/librustc_mir/transform/instrument_coverage.rs @@ -1,21 +1,17 @@ use crate::transform::{MirPass, MirSource}; use crate::util::patch::MirPatch; +use rustc_hir::lang_items; use rustc_middle::mir::interpret::Scalar; use rustc_middle::mir::*; use rustc_middle::ty; -use rustc_middle::ty::Ty; use rustc_middle::ty::TyCtxt; use rustc_span::def_id::DefId; use rustc_span::Span; -use rustc_target::abi; +/// Inserts call to count_code_region() as a placeholder to be replaced during code generation with +/// the intrinsic llvm.instrprof.increment. pub struct InstrumentCoverage; -/** - * Inserts call to count_code_region() as a placeholder to be replaced during code generation with - * the intrinsic llvm.instrprof.increment. - */ - impl<'tcx> MirPass<'tcx> for InstrumentCoverage { fn run_pass(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, body: &mut Body<'tcx>) { if tcx.sess.opts.debugging_opts.instrument_coverage { @@ -34,10 +30,17 @@ const INIT_FUNCTION_COUNTER: u32 = 0; pub fn instrument_coverage<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { let span = body.span.shrink_to_lo(); - let count_code_region_fn = - function_handle(tcx, span, tcx.lang_items().count_code_region_fn().unwrap()); - let counter_index = - const_int_operand(tcx, span, tcx.types.u32, Scalar::from_u32(INIT_FUNCTION_COUNTER)); + let count_code_region_fn = function_handle( + tcx, + tcx.require_lang_item(lang_items::CountCodeRegionFnLangItem, None), + span, + ); + let counter_index = Operand::const_from_scalar( + tcx, + tcx.types.u32, + Scalar::from_u32(INIT_FUNCTION_COUNTER), + span, + ); let mut patch = MirPatch::new(body); @@ -68,38 +71,13 @@ pub fn instrument_coverage<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { body.basic_blocks_mut().swap(next_block, new_block); } -fn function_handle<'tcx>(tcx: TyCtxt<'tcx>, span: Span, fn_def_id: DefId) -> Operand<'tcx> { +fn function_handle<'tcx>(tcx: TyCtxt<'tcx>, fn_def_id: DefId, span: Span) -> Operand<'tcx> { let ret_ty = tcx.fn_sig(fn_def_id).output(); let ret_ty = ret_ty.no_bound_vars().unwrap(); let substs = tcx.mk_substs(::std::iter::once(ty::subst::GenericArg::from(ret_ty))); Operand::function_handle(tcx, fn_def_id, substs, span) } -fn const_int_operand<'tcx>( - tcx: TyCtxt<'tcx>, - span: Span, - ty: Ty<'tcx>, - val: Scalar, -) -> Operand<'tcx> { - debug_assert!({ - let param_env_and_ty = ty::ParamEnv::empty().and(ty); - let type_size = tcx - .layout_of(param_env_and_ty) - .unwrap_or_else(|e| panic!("could not compute layout for {:?}: {:?}", ty, 
e)) - .size; - let scalar_size = abi::Size::from_bytes(match val { - Scalar::Raw { size, .. } => size, - _ => panic!("Invalid scalar type {:?}", val), - }); - scalar_size == type_size - }); - Operand::Constant(box Constant { - span, - user_ty: None, - literal: ty::Const::from_scalar(tcx, val, ty), - }) -} - fn placeholder_block<'tcx>(source_info: SourceInfo) -> BasicBlockData<'tcx> { BasicBlockData { statements: vec![], diff --git a/src/test/mir-opt/instrument_coverage.rs b/src/test/mir-opt/instrument_coverage.rs new file mode 100644 index 0000000000000..e8c723b528a1a --- /dev/null +++ b/src/test/mir-opt/instrument_coverage.rs @@ -0,0 +1,19 @@ +// Test that the initial version of Rust coverage injects count_code_region() placeholder calls, +// at the top of each function. The placeholders are later converted into LLVM instrprof.increment +// intrinsics, during codegen. + +// compile-flags: -Zinstrument-coverage +// EMIT_MIR rustc.main.InstrumentCoverage.diff +// EMIT_MIR rustc.bar.InstrumentCoverage.diff +fn main() { + loop { + if bar() { + break; + } + } +} + +#[inline(never)] +fn bar() -> bool { + true +} diff --git a/src/test/mir-opt/instrument_coverage/rustc.bar.InstrumentCoverage.diff b/src/test/mir-opt/instrument_coverage/rustc.bar.InstrumentCoverage.diff new file mode 100644 index 0000000000000..d23bb93d951dc --- /dev/null +++ b/src/test/mir-opt/instrument_coverage/rustc.bar.InstrumentCoverage.diff @@ -0,0 +1,41 @@ +- // MIR for `bar` before InstrumentCoverage ++ // MIR for `bar` after InstrumentCoverage + + fn bar() -> bool { + let mut _0: bool; // return place in scope 0 at $DIR/instrument_coverage.rs:17:13: 17:17 ++ let mut _1: (); // in scope 0 at $DIR/instrument_coverage.rs:17:1: 19:2 + + bb0: { ++ StorageLive(_1); // scope 0 at $DIR/instrument_coverage.rs:17:1: 19:2 ++ _1 = const std::intrinsics::count_code_region(const 0u32) -> bb2; // scope 0 at $DIR/instrument_coverage.rs:17:1: 19:2 ++ // ty::Const ++ // + ty: unsafe extern "rust-intrinsic" fn(u32) {std::intrinsics::count_code_region} ++ // + val: Value(Scalar()) ++ // mir::Constant ++ // + span: $DIR/instrument_coverage.rs:17:1: 17:1 ++ // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u32) {std::intrinsics::count_code_region}, val: Value(Scalar()) } ++ // ty::Const ++ // + ty: u32 ++ // + val: Value(Scalar(0x00000000)) ++ // mir::Constant ++ // + span: $DIR/instrument_coverage.rs:17:1: 17:1 ++ // + literal: Const { ty: u32, val: Value(Scalar(0x00000000)) } ++ } ++ ++ bb1 (cleanup): { ++ resume; // scope 0 at $DIR/instrument_coverage.rs:17:1: 19:2 ++ } ++ ++ bb2: { ++ StorageDead(_1); // scope 0 at $DIR/instrument_coverage.rs:18:5: 18:9 + _0 = const true; // scope 0 at $DIR/instrument_coverage.rs:18:5: 18:9 + // ty::Const + // + ty: bool + // + val: Value(Scalar(0x01)) + // mir::Constant + // + span: $DIR/instrument_coverage.rs:18:5: 18:9 + // + literal: Const { ty: bool, val: Value(Scalar(0x01)) } + return; // scope 0 at $DIR/instrument_coverage.rs:19:2: 19:2 + } + } + diff --git a/src/test/mir-opt/instrument_coverage/rustc.main.InstrumentCoverage.diff b/src/test/mir-opt/instrument_coverage/rustc.main.InstrumentCoverage.diff new file mode 100644 index 0000000000000..d5d0f82495d1a --- /dev/null +++ b/src/test/mir-opt/instrument_coverage/rustc.main.InstrumentCoverage.diff @@ -0,0 +1,82 @@ +- // MIR for `main` before InstrumentCoverage ++ // MIR for `main` after InstrumentCoverage + + fn main() -> () { + let mut _0: (); // return place in scope 0 at $DIR/instrument_coverage.rs:8:11: 8:11 + let mut _1: (); // in 
scope 0 at $DIR/instrument_coverage.rs:8:1: 14:2 + let mut _2: bool; // in scope 0 at $DIR/instrument_coverage.rs:10:12: 10:17 + let mut _3: !; // in scope 0 at $DIR/instrument_coverage.rs:10:18: 12:10 ++ let mut _4: (); // in scope 0 at $DIR/instrument_coverage.rs:8:1: 14:2 + + bb0: { +- falseUnwind -> [real: bb1, cleanup: bb6]; // scope 0 at $DIR/instrument_coverage.rs:9:5: 13:6 ++ StorageLive(_4); // scope 0 at $DIR/instrument_coverage.rs:8:1: 14:2 ++ _4 = const std::intrinsics::count_code_region(const 0u32) -> bb7; // scope 0 at $DIR/instrument_coverage.rs:8:1: 14:2 ++ // ty::Const ++ // + ty: unsafe extern "rust-intrinsic" fn(u32) {std::intrinsics::count_code_region} ++ // + val: Value(Scalar()) ++ // mir::Constant ++ // + span: $DIR/instrument_coverage.rs:8:1: 8:1 ++ // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u32) {std::intrinsics::count_code_region}, val: Value(Scalar()) } ++ // ty::Const ++ // + ty: u32 ++ // + val: Value(Scalar(0x00000000)) ++ // mir::Constant ++ // + span: $DIR/instrument_coverage.rs:8:1: 8:1 ++ // + literal: Const { ty: u32, val: Value(Scalar(0x00000000)) } + } + + bb1: { + StorageLive(_2); // scope 0 at $DIR/instrument_coverage.rs:10:12: 10:17 + _2 = const bar() -> [return: bb2, unwind: bb6]; // scope 0 at $DIR/instrument_coverage.rs:10:12: 10:17 + // ty::Const + // + ty: fn() -> bool {bar} + // + val: Value(Scalar()) + // mir::Constant + // + span: $DIR/instrument_coverage.rs:10:12: 10:15 + // + literal: Const { ty: fn() -> bool {bar}, val: Value(Scalar()) } + } + + bb2: { + FakeRead(ForMatchedPlace, _2); // scope 0 at $DIR/instrument_coverage.rs:10:12: 10:17 + switchInt(_2) -> [false: bb4, otherwise: bb3]; // scope 0 at $DIR/instrument_coverage.rs:10:9: 12:10 + } + + bb3: { + falseEdges -> [real: bb5, imaginary: bb4]; // scope 0 at $DIR/instrument_coverage.rs:10:9: 12:10 + } + + bb4: { + _1 = const (); // scope 0 at $DIR/instrument_coverage.rs:10:9: 12:10 + // ty::Const + // + ty: () + // + val: Value(Scalar()) + // mir::Constant + // + span: $DIR/instrument_coverage.rs:10:9: 12:10 + // + literal: Const { ty: (), val: Value(Scalar()) } + StorageDead(_2); // scope 0 at $DIR/instrument_coverage.rs:13:5: 13:6 + goto -> bb0; // scope 0 at $DIR/instrument_coverage.rs:9:5: 13:6 + } + + bb5: { + _0 = const (); // scope 0 at $DIR/instrument_coverage.rs:11:13: 11:18 + // ty::Const + // + ty: () + // + val: Value(Scalar()) + // mir::Constant + // + span: $DIR/instrument_coverage.rs:11:13: 11:18 + // + literal: Const { ty: (), val: Value(Scalar()) } + StorageDead(_2); // scope 0 at $DIR/instrument_coverage.rs:13:5: 13:6 + return; // scope 0 at $DIR/instrument_coverage.rs:14:2: 14:2 + } + + bb6 (cleanup): { + resume; // scope 0 at $DIR/instrument_coverage.rs:8:1: 14:2 ++ } ++ ++ bb7: { ++ StorageDead(_4); // scope 0 at $DIR/instrument_coverage.rs:9:5: 13:6 ++ falseUnwind -> [real: bb1, cleanup: bb6]; // scope 0 at $DIR/instrument_coverage.rs:9:5: 13:6 + } + } + From 163e5854562f5274f092d66318a5c805e18d83c5 Mon Sep 17 00:00:00 2001 From: Rich Kadel Date: Wed, 10 Jun 2020 12:48:30 -0700 Subject: [PATCH 23/34] updated mir-opt test due to other recent changes to MIR --- .../rustc.main.InstrumentCoverage.diff | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/src/test/mir-opt/instrument_coverage/rustc.main.InstrumentCoverage.diff b/src/test/mir-opt/instrument_coverage/rustc.main.InstrumentCoverage.diff index d5d0f82495d1a..095246580409e 100644 --- a/src/test/mir-opt/instrument_coverage/rustc.main.InstrumentCoverage.diff 
+++ b/src/test/mir-opt/instrument_coverage/rustc.main.InstrumentCoverage.diff @@ -9,7 +9,7 @@ + let mut _4: (); // in scope 0 at $DIR/instrument_coverage.rs:8:1: 14:2 bb0: { -- falseUnwind -> [real: bb1, cleanup: bb6]; // scope 0 at $DIR/instrument_coverage.rs:9:5: 13:6 +- falseUnwind -> [real: bb1, cleanup: bb2]; // scope 0 at $DIR/instrument_coverage.rs:9:5: 13:6 + StorageLive(_4); // scope 0 at $DIR/instrument_coverage.rs:8:1: 14:2 + _4 = const std::intrinsics::count_code_region(const 0u32) -> bb7; // scope 0 at $DIR/instrument_coverage.rs:8:1: 14:2 + // ty::Const @@ -28,7 +28,7 @@ bb1: { StorageLive(_2); // scope 0 at $DIR/instrument_coverage.rs:10:12: 10:17 - _2 = const bar() -> [return: bb2, unwind: bb6]; // scope 0 at $DIR/instrument_coverage.rs:10:12: 10:17 + _2 = const bar() -> [return: bb3, unwind: bb2]; // scope 0 at $DIR/instrument_coverage.rs:10:12: 10:17 // ty::Const // + ty: fn() -> bool {bar} // + val: Value(Scalar()) @@ -37,16 +37,20 @@ // + literal: Const { ty: fn() -> bool {bar}, val: Value(Scalar()) } } - bb2: { - FakeRead(ForMatchedPlace, _2); // scope 0 at $DIR/instrument_coverage.rs:10:12: 10:17 - switchInt(_2) -> [false: bb4, otherwise: bb3]; // scope 0 at $DIR/instrument_coverage.rs:10:9: 12:10 + bb2 (cleanup): { + resume; // scope 0 at $DIR/instrument_coverage.rs:8:1: 14:2 } bb3: { - falseEdges -> [real: bb5, imaginary: bb4]; // scope 0 at $DIR/instrument_coverage.rs:10:9: 12:10 + FakeRead(ForMatchedPlace, _2); // scope 0 at $DIR/instrument_coverage.rs:10:12: 10:17 + switchInt(_2) -> [false: bb5, otherwise: bb4]; // scope 0 at $DIR/instrument_coverage.rs:10:9: 12:10 } bb4: { + falseEdge -> [real: bb6, imaginary: bb5]; // scope 0 at $DIR/instrument_coverage.rs:10:9: 12:10 + } + + bb5: { _1 = const (); // scope 0 at $DIR/instrument_coverage.rs:10:9: 12:10 // ty::Const // + ty: () @@ -58,7 +62,7 @@ goto -> bb0; // scope 0 at $DIR/instrument_coverage.rs:9:5: 13:6 } - bb5: { + bb6: { _0 = const (); // scope 0 at $DIR/instrument_coverage.rs:11:13: 11:18 // ty::Const // + ty: () @@ -68,15 +72,11 @@ // + literal: Const { ty: (), val: Value(Scalar()) } StorageDead(_2); // scope 0 at $DIR/instrument_coverage.rs:13:5: 13:6 return; // scope 0 at $DIR/instrument_coverage.rs:14:2: 14:2 - } - - bb6 (cleanup): { - resume; // scope 0 at $DIR/instrument_coverage.rs:8:1: 14:2 + } + + bb7: { + StorageDead(_4); // scope 0 at $DIR/instrument_coverage.rs:9:5: 13:6 -+ falseUnwind -> [real: bb1, cleanup: bb6]; // scope 0 at $DIR/instrument_coverage.rs:9:5: 13:6 ++ falseUnwind -> [real: bb1, cleanup: bb2]; // scope 0 at $DIR/instrument_coverage.rs:9:5: 13:6 } } From 98685a4bf2ef50c6d6a64ef3867a29994d5a4a25 Mon Sep 17 00:00:00 2001 From: Rich Kadel Date: Mon, 15 Jun 2020 17:08:13 -0700 Subject: [PATCH 24/34] Add new `fn_span` to TerminatorKind::Call instance --- src/librustc_mir/transform/instrument_coverage.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/librustc_mir/transform/instrument_coverage.rs b/src/librustc_mir/transform/instrument_coverage.rs index fda7ad731fa27..c36614938e10f 100644 --- a/src/librustc_mir/transform/instrument_coverage.rs +++ b/src/librustc_mir/transform/instrument_coverage.rs @@ -57,6 +57,7 @@ pub fn instrument_coverage<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { destination: Some((Place::from(temp), new_block)), cleanup: None, from_hir_call: false, + fn_span: span, }, ); From 93022be9917a472f7a16410ce7cb2b76a5d3afdd Mon Sep 17 00:00:00 2001 From: David Wood Date: Sat, 13 Jun 2020 17:04:06 +0100 Subject: [PATCH 25/34] bootstrap: read config from 
$RUST_BOOTSTRAP_CONFIG This commit modifies bootstrap so that `config.toml` is read first from `RUST_BOOTSTRAP_CONFIG`, then `--config` and finally `config.toml` in the current directory. This is a subjective change, intended to improve the ergonomics when using "development shells" for rustc development (for example, using tools such as Nix) which set environment variables to ensure a reproducible environment (these development shells can then be version controlled). By optionally reading `config.toml` from an environment variable, a `config.toml` can be defined in the development shell and a path to it exposed in the `RUST_BOOTSTRAP_CONFIG` environment variable, avoiding the need to manually symlink the contents of this file to `config.toml` in the working directory. Signed-off-by: David Wood --- src/bootstrap/bootstrap.py | 3 ++- src/bootstrap/flags.rs | 10 ++-------- 2 files changed, 4 insertions(+), 9 deletions(-) diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py index b7d0fac5be31f..969d16d11e81b 100644 --- a/src/bootstrap/bootstrap.py +++ b/src/bootstrap/bootstrap.py @@ -894,7 +894,7 @@ def bootstrap(help_triggered): build.clean = args.clean try: - toml_path = args.config or 'config.toml' + toml_path = os.getenv('RUST_BOOTSTRAP_CONFIG') or args.config or 'config.toml' if not os.path.exists(toml_path): toml_path = os.path.join(build.rust_root, toml_path) @@ -947,6 +947,7 @@ def bootstrap(help_triggered): env["SRC"] = build.rust_root env["BOOTSTRAP_PARENT_ID"] = str(os.getpid()) env["BOOTSTRAP_PYTHON"] = sys.executable + env["BOOTSTRAP_CONFIG"] = toml_path env["BUILD_DIR"] = build.build_dir env["RUSTC_BOOTSTRAP"] = '1' env["CARGO"] = build.cargo() diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs index cfaa43f397095..47b983868285b 100644 --- a/src/bootstrap/flags.rs +++ b/src/bootstrap/flags.rs @@ -3,7 +3,7 @@ //! This module implements the command-line parsing of the build system which //! has various flags to configure how it's run.
-use std::fs; +use std::env; use std::path::PathBuf; use std::process; @@ -433,13 +433,7 @@ Arguments: // Get any optional paths which occur after the subcommand let paths = matches.free[1..].iter().map(|p| p.into()).collect::>(); - let cfg_file = matches.opt_str("config").map(PathBuf::from).or_else(|| { - if fs::metadata("config.toml").is_ok() { - Some(PathBuf::from("config.toml")) - } else { - None - } - }); + let cfg_file = env::var_os("BOOTSTRAP_CONFIG").map(PathBuf::from); // All subcommands except `clean` can have an optional "Available paths" section if matches.opt_present("verbose") { From a19dfb573d18c8b937c159cda24a3bb40ca5082d Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Fri, 12 Jun 2020 14:01:47 +0200 Subject: [PATCH 26/34] Create new E0763 error code for unterminated byte constant --- src/librustc_error_codes/error_codes.rs | 1 + src/librustc_error_codes/error_codes/E0763.md | 13 +++++++++++++ src/librustc_parse/lexer/mod.rs | 11 +++++++++-- 3 files changed, 23 insertions(+), 2 deletions(-) create mode 100644 src/librustc_error_codes/error_codes/E0763.md diff --git a/src/librustc_error_codes/error_codes.rs b/src/librustc_error_codes/error_codes.rs index 3fb5e04efc922..1d6a579bdff32 100644 --- a/src/librustc_error_codes/error_codes.rs +++ b/src/librustc_error_codes/error_codes.rs @@ -442,6 +442,7 @@ E0758: include_str!("./error_codes/E0758.md"), E0760: include_str!("./error_codes/E0760.md"), E0761: include_str!("./error_codes/E0761.md"), E0762: include_str!("./error_codes/E0762.md"), +E0763: include_str!("./error_codes/E0763.md"), ; // E0006, // merged with E0005 // E0008, // cannot bind by-move into a pattern guard diff --git a/src/librustc_error_codes/error_codes/E0763.md b/src/librustc_error_codes/error_codes/E0763.md new file mode 100644 index 0000000000000..095b779f3e78a --- /dev/null +++ b/src/librustc_error_codes/error_codes/E0763.md @@ -0,0 +1,13 @@ +A byte constant wasn't correctly ended. + +Erroneous code example: + +```compile_fail,E0763 +let c = b'a; // error! +``` + +To fix this error, add the missing quote: + +``` +let c = b'a'; // ok! 
+``` diff --git a/src/librustc_parse/lexer/mod.rs b/src/librustc_parse/lexer/mod.rs index 84b3335a0f628..2e3cf4e746ae9 100644 --- a/src/librustc_parse/lexer/mod.rs +++ b/src/librustc_parse/lexer/mod.rs @@ -339,8 +339,15 @@ impl<'a> StringReader<'a> { } rustc_lexer::LiteralKind::Byte { terminated } => { if !terminated { - self.fatal_span_(start + BytePos(1), suffix_start, "unterminated byte constant") - .raise() + self.sess + .span_diagnostic + .struct_span_fatal_with_code( + self.mk_sp(start + BytePos(1), suffix_start), + "unterminated byte constant", + error_code!(E0763), + ) + .emit(); + FatalError.raise(); } (token::Byte, Mode::Byte, 2, 1) // b' ' } From bad252c9faebf55565091f50bad784a0a3f1e756 Mon Sep 17 00:00:00 2001 From: Guillaume Gomez Date: Fri, 12 Jun 2020 14:01:53 +0200 Subject: [PATCH 27/34] Update ui tests --- src/test/ui/parser/byte-literals.rs | 2 +- src/test/ui/parser/byte-literals.stderr | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/test/ui/parser/byte-literals.rs b/src/test/ui/parser/byte-literals.rs index dadf3971220f7..9683a83e72095 100644 --- a/src/test/ui/parser/byte-literals.rs +++ b/src/test/ui/parser/byte-literals.rs @@ -8,5 +8,5 @@ pub fn main() { b' '; //~ ERROR byte constant must be escaped b'''; //~ ERROR byte constant must be escaped b'é'; //~ ERROR byte constant must be ASCII - b'a //~ ERROR unterminated byte constant + b'a //~ ERROR unterminated byte constant [E0763] } diff --git a/src/test/ui/parser/byte-literals.stderr b/src/test/ui/parser/byte-literals.stderr index 53d50af88d33b..7bbdc07cd835f 100644 --- a/src/test/ui/parser/byte-literals.stderr +++ b/src/test/ui/parser/byte-literals.stderr @@ -34,7 +34,7 @@ error: byte constant must be ASCII. Use a \xHH escape for a non-ASCII byte LL | b'é'; | ^ -error: unterminated byte constant +error[E0763]: unterminated byte constant --> $DIR/byte-literals.rs:11:6 | LL | b'a @@ -42,3 +42,4 @@ LL | b'a error: aborting due to 7 previous errors +For more information about this error, try `rustc --explain E0763`. From f0a42332b8ed123a92e9f8e2c329cbd2ad1753e9 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Sat, 6 Jun 2020 11:56:58 +0200 Subject: [PATCH 28/34] memory access sanity checks: abort instead of panic --- src/libcore/intrinsics.rs | 17 ++++++++++++----- src/libcore/ptr/mod.rs | 28 +++++++++++++++++++++------- 2 files changed, 33 insertions(+), 12 deletions(-) diff --git a/src/libcore/intrinsics.rs b/src/libcore/intrinsics.rs index 85076a573b528..59809239678a0 100644 --- a/src/libcore/intrinsics.rs +++ b/src/libcore/intrinsics.rs @@ -2057,9 +2057,14 @@ pub unsafe fn copy_nonoverlapping(src: *const T, dst: *mut T, count: usize) { fn copy_nonoverlapping(src: *const T, dst: *mut T, count: usize); } - debug_assert!(is_aligned_and_not_null(src), "attempt to copy from unaligned or null pointer"); - debug_assert!(is_aligned_and_not_null(dst), "attempt to copy to unaligned or null pointer"); - debug_assert!(is_nonoverlapping(src, dst, count), "attempt to copy to overlapping memory"); + if cfg!(debug_assertions) + && !(is_aligned_and_not_null(src) + && is_aligned_and_not_null(dst) + && is_nonoverlapping(src, dst, count)) + { + // Not panicking to keep codegen impact smaller. 
+ abort(); + } copy_nonoverlapping(src, dst, count) } @@ -2122,8 +2127,10 @@ pub unsafe fn copy(src: *const T, dst: *mut T, count: usize) { fn copy(src: *const T, dst: *mut T, count: usize); } - debug_assert!(is_aligned_and_not_null(src), "attempt to copy from unaligned or null pointer"); - debug_assert!(is_aligned_and_not_null(dst), "attempt to copy to unaligned or null pointer"); + if cfg!(debug_assertions) && !(is_aligned_and_not_null(src) && is_aligned_and_not_null(dst)) { + // Not panicking to keep codegen impact smaller. + abort(); + } copy(src, dst, count) } diff --git a/src/libcore/ptr/mod.rs b/src/libcore/ptr/mod.rs index 1be05d5effff3..fec638c947ce6 100644 --- a/src/libcore/ptr/mod.rs +++ b/src/libcore/ptr/mod.rs @@ -70,7 +70,7 @@ use crate::cmp::Ordering; use crate::fmt; use crate::hash; -use crate::intrinsics::{self, is_aligned_and_not_null, is_nonoverlapping}; +use crate::intrinsics::{self, abort, is_aligned_and_not_null, is_nonoverlapping}; use crate::mem::{self, MaybeUninit}; #[stable(feature = "rust1", since = "1.0.0")] @@ -420,9 +420,14 @@ pub unsafe fn swap(x: *mut T, y: *mut T) { #[inline] #[stable(feature = "swap_nonoverlapping", since = "1.27.0")] pub unsafe fn swap_nonoverlapping(x: *mut T, y: *mut T, count: usize) { - debug_assert!(is_aligned_and_not_null(x), "attempt to swap unaligned or null pointer"); - debug_assert!(is_aligned_and_not_null(y), "attempt to swap unaligned or null pointer"); - debug_assert!(is_nonoverlapping(x, y, count), "attempt to swap overlapping memory"); + if cfg!(debug_assertions) + && !(is_aligned_and_not_null(x) + && is_aligned_and_not_null(y) + && is_nonoverlapping(x, y, count)) + { + // Not panicking to keep codegen impact smaller. + abort(); + } let x = x as *mut u8; let y = y as *mut u8; @@ -838,7 +843,10 @@ pub unsafe fn read_unaligned(src: *const T) -> T { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub unsafe fn write(dst: *mut T, src: T) { - debug_assert!(is_aligned_and_not_null(dst), "attempt to write to unaligned or null pointer"); + if cfg!(debug_assertions) && !is_aligned_and_not_null(dst) { + // Not panicking to keep codegen impact smaller. + abort(); + } intrinsics::move_val_init(&mut *dst, src) } @@ -1003,7 +1011,10 @@ pub unsafe fn write_unaligned(dst: *mut T, src: T) { #[inline] #[stable(feature = "volatile", since = "1.9.0")] pub unsafe fn read_volatile(src: *const T) -> T { - debug_assert!(is_aligned_and_not_null(src), "attempt to read from unaligned or null pointer"); + if cfg!(debug_assertions) && !is_aligned_and_not_null(src) { + // Not panicking to keep codegen impact smaller. + abort(); + } intrinsics::volatile_load(src) } @@ -1072,7 +1083,10 @@ pub unsafe fn read_volatile(src: *const T) -> T { #[inline] #[stable(feature = "volatile", since = "1.9.0")] pub unsafe fn write_volatile(dst: *mut T, src: T) { - debug_assert!(is_aligned_and_not_null(dst), "attempt to write to unaligned or null pointer"); + if cfg!(debug_assertions) && !is_aligned_and_not_null(dst) { + // Not panicking to keep codegen impact smaller. 
+ abort(); + } intrinsics::volatile_store(dst, src); } From 81c7ebd54418fe2f91be10b7371c7a3f5cca3771 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Sat, 6 Jun 2020 12:19:29 +0200 Subject: [PATCH 29/34] we can enable one more codegen test in debug mode now --- src/test/codegen/vec-clear.rs | 1 - src/test/codegen/vec-optimizes-away.rs | 1 - 2 files changed, 2 deletions(-) diff --git a/src/test/codegen/vec-clear.rs b/src/test/codegen/vec-clear.rs index b9ffce8b0cb3d..15bfe421e9d35 100644 --- a/src/test/codegen/vec-clear.rs +++ b/src/test/codegen/vec-clear.rs @@ -1,4 +1,3 @@ -// ignore-debug: the debug assertions get in the way // compile-flags: -O #![crate_type = "lib"] diff --git a/src/test/codegen/vec-optimizes-away.rs b/src/test/codegen/vec-optimizes-away.rs index ebede0908c6c4..9143fad234087 100644 --- a/src/test/codegen/vec-optimizes-away.rs +++ b/src/test/codegen/vec-optimizes-away.rs @@ -1,4 +1,3 @@ -// // ignore-debug: the debug assertions get in the way // no-system-llvm // compile-flags: -O From 2b7d8588668bc79a1855a2c335572a1ac8ceaf34 Mon Sep 17 00:00:00 2001 From: Gary Guo Date: Tue, 16 Jun 2020 22:48:35 +0100 Subject: [PATCH 30/34] Add some comments related to place op typeck --- src/librustc_typeck/check/mod.rs | 7 +++---- src/librustc_typeck/check/place_op.rs | 6 ++++-- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index b1d32213b729e..fa7dd2156edfa 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -3219,10 +3219,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } } - // When there is an auto mutable borrow, it is equivalent to `&mut expr`, - // thus `expr` is ought to be typechecked with needs = [`Needs::MutPlace`]. - // However in many cases it might not be checked this way originally, e.g. - // the receiver of a method call. We need to fix them up. + // If there is a mutable auto-borrow, it is equivalent to `&mut <expr>`. + // In this case implicit use of `Deref` and `Index` within `<expr>` should + // instead be `DerefMut` and `IndexMut`, so fix those up. if autoborrow_mut { self.convert_place_derefs_to_mutable(expr); } diff --git a/src/librustc_typeck/check/place_op.rs b/src/librustc_typeck/check/place_op.rs index ce4b6f8baf917..d1c22cd1ac03e 100644 --- a/src/librustc_typeck/check/place_op.rs +++ b/src/librustc_typeck/check/place_op.rs @@ -11,10 +11,11 @@ use rustc_span::symbol::{sym, Ident}; use rustc_span::Span; impl<'a, 'tcx> FnCtxt<'a, 'tcx> { + /// Type-check `*oprnd_expr` with `oprnd_expr` type-checked already. pub(super) fn lookup_derefing( &self, expr: &hir::Expr<'_>, - oprnd: &'tcx hir::Expr<'tcx>, + oprnd_expr: &'tcx hir::Expr<'tcx>, oprnd_ty: Ty<'tcx>, ) -> Option<Ty<'tcx>> { if let Some(mt) = oprnd_ty.builtin_deref(true) { @@ -25,7 +26,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let method = self.register_infer_ok_obligations(ok); if let ty::Ref(region, _, hir::Mutability::Not) = method.sig.inputs()[0].kind { self.apply_adjustments( - oprnd, + oprnd_expr, vec![Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(region, AutoBorrowMutability::Not)), target: method.sig.inputs()[0], @@ -39,6 +40,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { Some(ty) } + /// Type-check `*base_expr[index_expr]` with `base_expr` and `index_expr` type-checked already.
pub(super) fn lookup_indexing( &self, expr: &hir::Expr<'_>, From 1db44afecd892351ae91499b1baefee433bbc04b Mon Sep 17 00:00:00 2001 From: Rich Kadel Date: Tue, 16 Jun 2020 18:48:46 -0700 Subject: [PATCH 31/34] Ensure profiling runtime for -Zinstrument-coverage If config.toml `profiler = false`, the test/mir-opt/instrument_coverage test is ignored. Otherwise, this patch ensures the profiler_runtime is loaded when -Zinstrument-coverage is enabled. Confirmed that this works for MacOS. --- config.toml.example | 3 +- src/librustc_metadata/creader.rs | 4 +- src/test/mir-opt/instrument_coverage.rs | 1 + .../rustc.bar.InstrumentCoverage.diff | 22 ++++---- .../rustc.main.InstrumentCoverage.diff | 54 +++++++++---------- 5 files changed, 44 insertions(+), 40 deletions(-) diff --git a/config.toml.example b/config.toml.example index d995554913f84..bc6760334170b 100644 --- a/config.toml.example +++ b/config.toml.example @@ -209,7 +209,8 @@ # Build the sanitizer runtimes #sanitizers = false -# Build the profiler runtime +# Build the profiler runtime (required when compiling with options that depend +# on this runtime, such as `-C profile-generate` or `-Z instrument-coverage`). #profiler = false # Indicates whether the native libraries linked into Cargo will be statically diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index b8ebcd6c8a8ff..f3e4f40bd5a18 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -698,7 +698,9 @@ impl<'a> CrateLoader<'a> { } fn inject_profiler_runtime(&mut self) { - if (self.sess.opts.debugging_opts.profile || self.sess.opts.cg.profile_generate.enabled()) + if (self.sess.opts.debugging_opts.instrument_coverage + || self.sess.opts.debugging_opts.profile + || self.sess.opts.cg.profile_generate.enabled()) && !self.sess.opts.debugging_opts.no_profiler_runtime { info!("loading profiler"); diff --git a/src/test/mir-opt/instrument_coverage.rs b/src/test/mir-opt/instrument_coverage.rs index e8c723b528a1a..3fe010ef68fc3 100644 --- a/src/test/mir-opt/instrument_coverage.rs +++ b/src/test/mir-opt/instrument_coverage.rs @@ -2,6 +2,7 @@ // at the top of each function. The placeholders are later converted into LLVM instrprof.increment // intrinsics, during codegen. 
+// needs-profiler-support // compile-flags: -Zinstrument-coverage // EMIT_MIR rustc.main.InstrumentCoverage.diff // EMIT_MIR rustc.bar.InstrumentCoverage.diff diff --git a/src/test/mir-opt/instrument_coverage/rustc.bar.InstrumentCoverage.diff b/src/test/mir-opt/instrument_coverage/rustc.bar.InstrumentCoverage.diff index d23bb93d951dc..1e64379aa0e4b 100644 --- a/src/test/mir-opt/instrument_coverage/rustc.bar.InstrumentCoverage.diff +++ b/src/test/mir-opt/instrument_coverage/rustc.bar.InstrumentCoverage.diff @@ -2,40 +2,40 @@ + // MIR for `bar` after InstrumentCoverage fn bar() -> bool { - let mut _0: bool; // return place in scope 0 at $DIR/instrument_coverage.rs:17:13: 17:17 -+ let mut _1: (); // in scope 0 at $DIR/instrument_coverage.rs:17:1: 19:2 + let mut _0: bool; // return place in scope 0 at $DIR/instrument_coverage.rs:18:13: 18:17 ++ let mut _1: (); // in scope 0 at $DIR/instrument_coverage.rs:18:1: 20:2 bb0: { -+ StorageLive(_1); // scope 0 at $DIR/instrument_coverage.rs:17:1: 19:2 -+ _1 = const std::intrinsics::count_code_region(const 0u32) -> bb2; // scope 0 at $DIR/instrument_coverage.rs:17:1: 19:2 ++ StorageLive(_1); // scope 0 at $DIR/instrument_coverage.rs:18:1: 20:2 ++ _1 = const std::intrinsics::count_code_region(const 0u32) -> bb2; // scope 0 at $DIR/instrument_coverage.rs:18:1: 20:2 + // ty::Const + // + ty: unsafe extern "rust-intrinsic" fn(u32) {std::intrinsics::count_code_region} + // + val: Value(Scalar()) + // mir::Constant -+ // + span: $DIR/instrument_coverage.rs:17:1: 17:1 ++ // + span: $DIR/instrument_coverage.rs:18:1: 18:1 + // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u32) {std::intrinsics::count_code_region}, val: Value(Scalar()) } + // ty::Const + // + ty: u32 + // + val: Value(Scalar(0x00000000)) + // mir::Constant -+ // + span: $DIR/instrument_coverage.rs:17:1: 17:1 ++ // + span: $DIR/instrument_coverage.rs:18:1: 18:1 + // + literal: Const { ty: u32, val: Value(Scalar(0x00000000)) } + } + + bb1 (cleanup): { -+ resume; // scope 0 at $DIR/instrument_coverage.rs:17:1: 19:2 ++ resume; // scope 0 at $DIR/instrument_coverage.rs:18:1: 20:2 + } + + bb2: { -+ StorageDead(_1); // scope 0 at $DIR/instrument_coverage.rs:18:5: 18:9 - _0 = const true; // scope 0 at $DIR/instrument_coverage.rs:18:5: 18:9 ++ StorageDead(_1); // scope 0 at $DIR/instrument_coverage.rs:19:5: 19:9 + _0 = const true; // scope 0 at $DIR/instrument_coverage.rs:19:5: 19:9 // ty::Const // + ty: bool // + val: Value(Scalar(0x01)) // mir::Constant - // + span: $DIR/instrument_coverage.rs:18:5: 18:9 + // + span: $DIR/instrument_coverage.rs:19:5: 19:9 // + literal: Const { ty: bool, val: Value(Scalar(0x01)) } - return; // scope 0 at $DIR/instrument_coverage.rs:19:2: 19:2 + return; // scope 0 at $DIR/instrument_coverage.rs:20:2: 20:2 } } diff --git a/src/test/mir-opt/instrument_coverage/rustc.main.InstrumentCoverage.diff b/src/test/mir-opt/instrument_coverage/rustc.main.InstrumentCoverage.diff index 095246580409e..82d21467827eb 100644 --- a/src/test/mir-opt/instrument_coverage/rustc.main.InstrumentCoverage.diff +++ b/src/test/mir-opt/instrument_coverage/rustc.main.InstrumentCoverage.diff @@ -2,81 +2,81 @@ + // MIR for `main` after InstrumentCoverage fn main() -> () { - let mut _0: (); // return place in scope 0 at $DIR/instrument_coverage.rs:8:11: 8:11 - let mut _1: (); // in scope 0 at $DIR/instrument_coverage.rs:8:1: 14:2 - let mut _2: bool; // in scope 0 at $DIR/instrument_coverage.rs:10:12: 10:17 - let mut _3: !; // in scope 0 at $DIR/instrument_coverage.rs:10:18: 12:10 -+ let mut 
_4: (); // in scope 0 at $DIR/instrument_coverage.rs:8:1: 14:2 + let mut _0: (); // return place in scope 0 at $DIR/instrument_coverage.rs:9:11: 9:11 + let mut _1: (); // in scope 0 at $DIR/instrument_coverage.rs:9:1: 15:2 + let mut _2: bool; // in scope 0 at $DIR/instrument_coverage.rs:11:12: 11:17 + let mut _3: !; // in scope 0 at $DIR/instrument_coverage.rs:11:18: 13:10 ++ let mut _4: (); // in scope 0 at $DIR/instrument_coverage.rs:9:1: 15:2 bb0: { -- falseUnwind -> [real: bb1, cleanup: bb2]; // scope 0 at $DIR/instrument_coverage.rs:9:5: 13:6 -+ StorageLive(_4); // scope 0 at $DIR/instrument_coverage.rs:8:1: 14:2 -+ _4 = const std::intrinsics::count_code_region(const 0u32) -> bb7; // scope 0 at $DIR/instrument_coverage.rs:8:1: 14:2 +- falseUnwind -> [real: bb1, cleanup: bb2]; // scope 0 at $DIR/instrument_coverage.rs:10:5: 14:6 ++ StorageLive(_4); // scope 0 at $DIR/instrument_coverage.rs:9:1: 15:2 ++ _4 = const std::intrinsics::count_code_region(const 0u32) -> bb7; // scope 0 at $DIR/instrument_coverage.rs:9:1: 15:2 + // ty::Const + // + ty: unsafe extern "rust-intrinsic" fn(u32) {std::intrinsics::count_code_region} + // + val: Value(Scalar()) + // mir::Constant -+ // + span: $DIR/instrument_coverage.rs:8:1: 8:1 ++ // + span: $DIR/instrument_coverage.rs:9:1: 9:1 + // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u32) {std::intrinsics::count_code_region}, val: Value(Scalar()) } + // ty::Const + // + ty: u32 + // + val: Value(Scalar(0x00000000)) + // mir::Constant -+ // + span: $DIR/instrument_coverage.rs:8:1: 8:1 ++ // + span: $DIR/instrument_coverage.rs:9:1: 9:1 + // + literal: Const { ty: u32, val: Value(Scalar(0x00000000)) } } bb1: { - StorageLive(_2); // scope 0 at $DIR/instrument_coverage.rs:10:12: 10:17 - _2 = const bar() -> [return: bb3, unwind: bb2]; // scope 0 at $DIR/instrument_coverage.rs:10:12: 10:17 + StorageLive(_2); // scope 0 at $DIR/instrument_coverage.rs:11:12: 11:17 + _2 = const bar() -> [return: bb3, unwind: bb2]; // scope 0 at $DIR/instrument_coverage.rs:11:12: 11:17 // ty::Const // + ty: fn() -> bool {bar} // + val: Value(Scalar()) // mir::Constant - // + span: $DIR/instrument_coverage.rs:10:12: 10:15 + // + span: $DIR/instrument_coverage.rs:11:12: 11:15 // + literal: Const { ty: fn() -> bool {bar}, val: Value(Scalar()) } } bb2 (cleanup): { - resume; // scope 0 at $DIR/instrument_coverage.rs:8:1: 14:2 + resume; // scope 0 at $DIR/instrument_coverage.rs:9:1: 15:2 } bb3: { - FakeRead(ForMatchedPlace, _2); // scope 0 at $DIR/instrument_coverage.rs:10:12: 10:17 - switchInt(_2) -> [false: bb5, otherwise: bb4]; // scope 0 at $DIR/instrument_coverage.rs:10:9: 12:10 + FakeRead(ForMatchedPlace, _2); // scope 0 at $DIR/instrument_coverage.rs:11:12: 11:17 + switchInt(_2) -> [false: bb5, otherwise: bb4]; // scope 0 at $DIR/instrument_coverage.rs:11:9: 13:10 } bb4: { - falseEdge -> [real: bb6, imaginary: bb5]; // scope 0 at $DIR/instrument_coverage.rs:10:9: 12:10 + falseEdge -> [real: bb6, imaginary: bb5]; // scope 0 at $DIR/instrument_coverage.rs:11:9: 13:10 } bb5: { - _1 = const (); // scope 0 at $DIR/instrument_coverage.rs:10:9: 12:10 + _1 = const (); // scope 0 at $DIR/instrument_coverage.rs:11:9: 13:10 // ty::Const // + ty: () // + val: Value(Scalar()) // mir::Constant - // + span: $DIR/instrument_coverage.rs:10:9: 12:10 + // + span: $DIR/instrument_coverage.rs:11:9: 13:10 // + literal: Const { ty: (), val: Value(Scalar()) } - StorageDead(_2); // scope 0 at $DIR/instrument_coverage.rs:13:5: 13:6 - goto -> bb0; // scope 0 at $DIR/instrument_coverage.rs:9:5: 
13:6 + StorageDead(_2); // scope 0 at $DIR/instrument_coverage.rs:14:5: 14:6 + goto -> bb0; // scope 0 at $DIR/instrument_coverage.rs:10:5: 14:6 } bb6: { - _0 = const (); // scope 0 at $DIR/instrument_coverage.rs:11:13: 11:18 + _0 = const (); // scope 0 at $DIR/instrument_coverage.rs:12:13: 12:18 // ty::Const // + ty: () // + val: Value(Scalar()) // mir::Constant - // + span: $DIR/instrument_coverage.rs:11:13: 11:18 + // + span: $DIR/instrument_coverage.rs:12:13: 12:18 // + literal: Const { ty: (), val: Value(Scalar()) } - StorageDead(_2); // scope 0 at $DIR/instrument_coverage.rs:13:5: 13:6 - return; // scope 0 at $DIR/instrument_coverage.rs:14:2: 14:2 + StorageDead(_2); // scope 0 at $DIR/instrument_coverage.rs:14:5: 14:6 + return; // scope 0 at $DIR/instrument_coverage.rs:15:2: 15:2 + } + + bb7: { -+ StorageDead(_4); // scope 0 at $DIR/instrument_coverage.rs:9:5: 13:6 -+ falseUnwind -> [real: bb1, cleanup: bb2]; // scope 0 at $DIR/instrument_coverage.rs:9:5: 13:6 ++ StorageDead(_4); // scope 0 at $DIR/instrument_coverage.rs:10:5: 14:6 ++ falseUnwind -> [real: bb1, cleanup: bb2]; // scope 0 at $DIR/instrument_coverage.rs:10:5: 14:6 } } From c3387293d4ff050cce2c7f3a79c8e7b040515f8c Mon Sep 17 00:00:00 2001 From: Rich Kadel Date: Wed, 17 Jun 2020 10:29:00 -0700 Subject: [PATCH 32/34] Update src/libcore/intrinsics.rs Co-authored-by: bjorn3 --- src/libcore/intrinsics.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libcore/intrinsics.rs b/src/libcore/intrinsics.rs index 3806d3ae25487..2d3e181466105 100644 --- a/src/libcore/intrinsics.rs +++ b/src/libcore/intrinsics.rs @@ -1947,7 +1947,7 @@ extern "rust-intrinsic" { /// generation. #[cfg(not(bootstrap))] #[lang = "count_code_region"] - pub fn count_code_region(_index: u32); + pub fn count_code_region(index: u32); } // Some functions are defined here because they accidentally got made From b9f0304af8b74bdf01cd39f1603d73bdc599de79 Mon Sep 17 00:00:00 2001 From: Rich Kadel Date: Wed, 17 Jun 2020 11:24:43 -0700 Subject: [PATCH 33/34] temporarily enable mac and windows tests on bors try testing platform-specific changes --- src/ci/azure-pipelines/try.yml | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/src/ci/azure-pipelines/try.yml b/src/ci/azure-pipelines/try.yml index 38a0685e0f75a..b57b691641ca5 100644 --- a/src/ci/azure-pipelines/try.yml +++ b/src/ci/azure-pipelines/try.yml @@ -82,3 +82,33 @@ jobs: # INITIAL_RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-extended --enable-profiler # SCRIPT: python x.py dist # DEPLOY_ALT: 1 + +- job: macOS + timeoutInMinutes: 600 + pool: + vmImage: macos-10.15 + steps: + - template: steps/run.yml + strategy: + matrix: + x86_64-apple: + SCRIPT: ./x.py test + INITIAL_RUST_CONFIGURE_ARGS: --build=x86_64-apple-darwin --enable-sanitizers --enable-profiler --set rust.jemalloc + RUSTC_RETRY_LINKER_ON_SEGFAULT: 1 + MACOSX_DEPLOYMENT_TARGET: 10.8 + MACOSX_STD_DEPLOYMENT_TARGET: 10.7 + NO_LLVM_ASSERTIONS: 1 + NO_DEBUG_ASSERTIONS: 1 +- job: Windows + timeoutInMinutes: 600 + pool: + vmImage: 'vs2017-win2016' + steps: + - template: steps/run.yml + strategy: + matrix: + x86_64-msvc: + INITIAL_RUST_CONFIGURE_ARGS: > + --build=x86_64-pc-windows-msvc + --enable-profiler + SCRIPT: python x.py test From 36c9014ddd3e2ac6b6a0e9f623e791281c40473d Mon Sep 17 00:00:00 2001 From: Rich Kadel Date: Wed, 17 Jun 2020 14:50:28 -0700 Subject: [PATCH 34/34] removed try config to test mac & windows (passed) 
https://dev.azure.com/rust-lang/rust/_build/results?buildId=32224&view=results --- src/ci/azure-pipelines/try.yml | 30 ------------------------------ 1 file changed, 30 deletions(-) diff --git a/src/ci/azure-pipelines/try.yml b/src/ci/azure-pipelines/try.yml index b57b691641ca5..38a0685e0f75a 100644 --- a/src/ci/azure-pipelines/try.yml +++ b/src/ci/azure-pipelines/try.yml @@ -82,33 +82,3 @@ jobs: # INITIAL_RUST_CONFIGURE_ARGS: --build=x86_64-pc-windows-msvc --enable-extended --enable-profiler # SCRIPT: python x.py dist # DEPLOY_ALT: 1 - -- job: macOS - timeoutInMinutes: 600 - pool: - vmImage: macos-10.15 - steps: - - template: steps/run.yml - strategy: - matrix: - x86_64-apple: - SCRIPT: ./x.py test - INITIAL_RUST_CONFIGURE_ARGS: --build=x86_64-apple-darwin --enable-sanitizers --enable-profiler --set rust.jemalloc - RUSTC_RETRY_LINKER_ON_SEGFAULT: 1 - MACOSX_DEPLOYMENT_TARGET: 10.8 - MACOSX_STD_DEPLOYMENT_TARGET: 10.7 - NO_LLVM_ASSERTIONS: 1 - NO_DEBUG_ASSERTIONS: 1 -- job: Windows - timeoutInMinutes: 600 - pool: - vmImage: 'vs2017-win2016' - steps: - - template: steps/run.yml - strategy: - matrix: - x86_64-msvc: - INITIAL_RUST_CONFIGURE_ARGS: > - --build=x86_64-pc-windows-msvc - --enable-profiler - SCRIPT: python x.py test
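
Taken together, the `-Zinstrument-coverage` changes in this series inject a placeholder `count_code_region(0)` call at the top of each function's MIR and later lower it to `llvm.instrprof.increment` during codegen. The listing below is a hand-written approximation of that behavior for the `main`/`bar` pair used in the mir-opt test; it is not compiler output, and the `MAIN_REGION_0`/`BAR_REGION_0` statics and the `count_region` helper are illustrative stand-ins for the per-function counters that the real implementation keeps in the profiler runtime (hence the `needs-profiler-support` directive and the `profiler = true` config.toml setting referenced above).

use std::sync::atomic::{AtomicU32, Ordering};

// Illustrative per-function counters; the real implementation records these
// through the profiler runtime, not through user-visible statics.
static MAIN_REGION_0: AtomicU32 = AtomicU32::new(0);
static BAR_REGION_0: AtomicU32 = AtomicU32::new(0);

// Stand-in for the `count_code_region` placeholder intrinsic. Counter index 0
// is the first (and, at this stage of the series, only) region per function.
fn count_region(counter: &AtomicU32) {
    counter.fetch_add(1, Ordering::Relaxed);
}

fn bar() -> bool {
    count_region(&BAR_REGION_0); // injected at the top of `bar`
    true
}

fn main() {
    count_region(&MAIN_REGION_0); // injected at the top of `main`
    loop {
        if bar() {
            break;
        }
    }
    println!("bar entered {} time(s)", BAR_REGION_0.load(Ordering::Relaxed));
}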