diff --git a/.config/nextest.toml b/.config/nextest.toml
index f5cbd08ebb7..3858b25906d 100644
--- a/.config/nextest.toml
+++ b/.config/nextest.toml
@@ -1,6 +1,13 @@
 [profile.default]
 final-status-level = "slow"
 
+[[profile.default.overrides]]
+# test_subprocess_doesnt_exit runs a sleep command for 360 seconds. If integration tests take longer
+# than 180 seconds, it likely means that nextest is stuck waiting for the sleep command to exit.
+# This is a bug.
+filter = 'package(integration-tests)'
+slow-timeout = { period = "60s", terminate-after = 3 }
+
 [profile.ci]
 # Don't fail fast in CI to run the full test suite.
 fail-fast = false
diff --git a/cargo-nextest/src/dispatch.rs b/cargo-nextest/src/dispatch.rs
index 1ba560e5689..7bc09549a56 100644
--- a/cargo-nextest/src/dispatch.rs
+++ b/cargo-nextest/src/dispatch.rs
@@ -1199,7 +1199,7 @@ impl App {
             }
         };
 
-        let mut runner = runner_builder.build(
+        let runner = runner_builder.build(
            &test_list,
            profile,
            handler,
diff --git a/fixtures/nextest-tests/tests/basic.rs b/fixtures/nextest-tests/tests/basic.rs
index ab95072f4ab..2848be932f9 100644
--- a/fixtures/nextest-tests/tests/basic.rs
+++ b/fixtures/nextest-tests/tests/basic.rs
@@ -282,9 +282,12 @@ fn test_result_failure() -> Result<(), std::io::Error> {
 #[cfg(any(unix, windows))]
 #[test]
 fn test_subprocess_doesnt_exit() {
-    // Note: setting a high value here can cause large delays with the GitHub Actions runner on
-    // Windows.
+    // Note: this is synchronized with a per-test override in the main nextest repo. TODO: This
+    // should actually be 360, but needs to wait until cargo-nextest 0.9.44 is out (because the test
+    // runner itself hangs, and CI is tested against the latest release as well.)
     let mut cmd = sleep_cmd(5);
+    // Try setting stdout to a pipe -- this will cause the runner to hang, unless it stops waiting for output once the test exits.
+    cmd.stdout(std::process::Stdio::piped());
     cmd.spawn().unwrap();
 }
 
diff --git a/nextest-runner/src/list/test_list.rs b/nextest-runner/src/list/test_list.rs
index 95e12c129e2..123b73da4ff 100644
--- a/nextest-runner/src/list/test_list.rs
+++ b/nextest-runner/src/list/test_list.rs
@@ -234,6 +234,11 @@ impl<'g> TestList<'g> {
 
         let fut = stream.buffer_unordered(list_threads).try_collect();
         let rust_suites: BTreeMap<_, _> = runtime.block_on(fut)?;
+
+        // Ensure that the runtime doesn't stay hanging even if a custom test framework misbehaves
+        // (can be an issue on Windows).
+        runtime.shutdown_background();
+
         let test_count = rust_suites
             .values()
             .map(|suite| suite.status.test_count())
diff --git a/nextest-runner/src/runner.rs b/nextest-runner/src/runner.rs
index 3de4670bb1a..a11fa9c6988 100644
--- a/nextest-runner/src/runner.rs
+++ b/nextest-runner/src/runner.rs
@@ -210,7 +210,7 @@ impl<'a> TestRunner<'a> {
     /// Executes the listed tests, each one in its own process.
     ///
     /// The callback is called with the results of each test.
-    pub fn execute<F>(&mut self, mut callback: F) -> RunStats
+    pub fn execute<F>(self, mut callback: F) -> RunStats
     where
         F: FnMut(TestEvent<'a>) + Send,
     {
@@ -225,12 +225,17 @@ impl<'a> TestRunner<'a> {
     ///
     /// Accepts a callback that is called with the results of each test. If the callback returns an
     /// error, the test run terminates and the callback is no longer called.
-    pub fn try_execute<F, E>(&mut self, callback: F) -> Result<RunStats, E>
+    pub fn try_execute<F, E>(mut self, callback: F) -> Result<RunStats, E>
     where
         F: FnMut(TestEvent<'a>) -> Result<(), E> + Send,
         E: Send,
     {
-        self.inner.try_execute(&mut self.handler, callback)
+        let run_stats = self.inner.try_execute(&mut self.handler, callback);
+        // On Windows, leaked subprocesses can keep the stdout and stderr futures pending,
+        // leaving the runner stuck indefinitely if it's dropped the normal way. Shut it down
+        // aggressively, being OK with leaked resources.
+        self.inner.runtime.shutdown_background();
+        run_stats
     }
 }
 
@@ -840,8 +845,6 @@ impl<'a> TestRunnerInner<'a> {
                     res?;
                 }
                 () = sleep, if !collect_output_done => {
-                    // stdout and/or stderr haven't completed yet. In this case, break the loop
-                    // and mark this as leaked.
                     break true;
                 }
                 else => {
diff --git a/nextest-runner/tests/integration/basic.rs b/nextest-runner/tests/integration/basic.rs
index 8cc3ae1f60e..4b513559df5 100644
--- a/nextest-runner/tests/integration/basic.rs
+++ b/nextest-runner/tests/integration/basic.rs
@@ -96,7 +96,7 @@ fn test_run() -> Result<()> {
         .expect("default config is valid");
     let build_platforms = BuildPlatforms::new(None).unwrap();
 
-    let mut runner = TestRunnerBuilder::default()
+    let runner = TestRunnerBuilder::default()
         .build(
             &test_list,
             profile.apply_build_platforms(&build_platforms),
@@ -106,7 +106,7 @@ fn test_run() -> Result<()> {
         )
         .unwrap();
 
-    let (instance_statuses, run_stats) = execute_collect(&mut runner);
+    let (instance_statuses, run_stats) = execute_collect(runner);
 
     for (binary_id, expected) in &*EXPECTED_TESTS {
         let test_binary = FIXTURE_TARGETS
@@ -197,7 +197,7 @@ fn test_run_ignored() -> Result<()> {
         .expect("default config is valid");
     let build_platforms = BuildPlatforms::new(None).unwrap();
 
-    let mut runner = TestRunnerBuilder::default()
+    let runner = TestRunnerBuilder::default()
         .build(
             &test_list,
             profile.apply_build_platforms(&build_platforms),
@@ -207,7 +207,7 @@ fn test_run_ignored() -> Result<()> {
         )
         .unwrap();
 
-    let (instance_statuses, run_stats) = execute_collect(&mut runner);
+    let (instance_statuses, run_stats) = execute_collect(runner);
 
     for (name, expected) in &*EXPECTED_TESTS {
         let test_binary = FIXTURE_TARGETS
@@ -402,7 +402,7 @@ fn test_retries(retries: Option<usize>) -> Result<()> {
     if let Some(retries) = retries {
         builder.set_retries(retries);
     }
-    let mut runner = builder
+    let runner = builder
         .build(
             &test_list,
             profile,
@@ -412,7 +412,7 @@ fn test_retries(retries: Option<usize>) -> Result<()> {
         )
         .unwrap();
 
-    let (instance_statuses, run_stats) = execute_collect(&mut runner);
+    let (instance_statuses, run_stats) = execute_collect(runner);
 
     for (name, expected) in &*EXPECTED_TESTS {
         let test_binary = FIXTURE_TARGETS
@@ -538,7 +538,7 @@ fn test_termination() -> Result<()> {
     let build_platforms = BuildPlatforms::new(None).unwrap();
     let profile = profile.apply_build_platforms(&build_platforms);
 
-    let mut runner = TestRunnerBuilder::default()
+    let runner = TestRunnerBuilder::default()
         .build(
             &test_list,
             profile,
@@ -548,7 +548,7 @@ fn test_termination() -> Result<()> {
         )
         .unwrap();
 
-    let (instance_statuses, run_stats) = execute_collect(&mut runner);
+    let (instance_statuses, run_stats) = execute_collect(runner);
     assert_eq!(run_stats.timed_out, 3, "3 tests timed out");
     for test_name in [
         "test_slow_timeout",
diff --git a/nextest-runner/tests/integration/fixtures.rs b/nextest-runner/tests/integration/fixtures.rs
index 1396cda087d..7c4c291d2e1 100644
--- a/nextest-runner/tests/integration/fixtures.rs
+++ b/nextest-runner/tests/integration/fixtures.rs
@@ -407,7 +407,7 @@ impl fmt::Debug for InstanceStatus {
 }
 
 pub(crate) fn execute_collect<'a>(
-    runner: &mut TestRunner<'a>,
+    runner: TestRunner<'a>,
 ) -> (
     HashMap<(&'a Utf8Path, &'a str), InstanceValue<'a>>,
     RunStats,
diff --git a/nextest-runner/tests/integration/target_runner.rs b/nextest-runner/tests/integration/target_runner.rs
index 0511201febd..ebf3586ce85 100644
--- a/nextest-runner/tests/integration/target_runner.rs
+++ b/nextest-runner/tests/integration/target_runner.rs
@@ -203,7 +203,7 @@ fn test_run_with_target_runner() -> Result<()> {
         .apply_build_platforms(&build_platforms);
 
     let runner = TestRunnerBuilder::default();
-    let mut runner = runner
+    let runner = runner
         .build(
             &test_list,
             profile,
@@ -213,7 +213,7 @@ fn test_run_with_target_runner() -> Result<()> {
         )
         .unwrap();
 
-    let (instance_statuses, run_stats) = execute_collect(&mut runner);
+    let (instance_statuses, run_stats) = execute_collect(runner);
 
     for (name, expected) in &*EXPECTED_TESTS {
         let test_binary = FIXTURE_TARGETS
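
Note on the shutdown pattern used above: both the test listing code (test_list.rs) and TestRunner::try_execute (runner.rs) now finish by calling Tokio's Runtime::shutdown_background, which consumes the runtime and returns immediately instead of blocking on outstanding work. The following is a minimal standalone sketch of that pattern, not part of the patch; it assumes the tokio crate with its full feature set, and the sleep-based workload is a made-up stand-in for driving test processes.

use std::time::Duration;

fn main() {
    // Build a multi-threaded runtime, similar in spirit to the one the lister and runner use.
    let runtime = tokio::runtime::Builder::new_multi_thread()
        .enable_all()
        .build()
        .expect("failed to build Tokio runtime");

    // Drive the main workload to completion (stand-in for listing or running tests).
    runtime.block_on(async {
        tokio::time::sleep(Duration::from_millis(10)).await;
    });

    // Dropping the runtime normally blocks until in-flight blocking work finishes, which can
    // hang forever if a leaked grandchild process keeps a piped stdout/stderr open (the
    // situation described in the comments above). shutdown_background() returns immediately
    // and accepts the leaked resources instead.
    runtime.shutdown_background();
}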