feat(test): skip internal stack frames for errors #14302

Merged
merged 8 commits on Apr 18, 2022
cli/lsp/testing/execution.rs (2 additions, 9 deletions)
@@ -8,7 +8,6 @@ use crate::checksum;
use crate::create_main_worker;
use crate::emit;
use crate::flags;
use crate::fmt_errors::PrettyJsError;
use crate::located_script_name;
use crate::lsp::client::Client;
use crate::lsp::client::TestingNotification;
@@ -799,10 +798,7 @@ impl test::TestReporter for LspTestReporter {
})
}
test::TestResult::Failed(js_error) => {
let err_string = PrettyJsError::create(*js_error.clone())
.to_string()
.trim_start_matches("Uncaught ")
.to_string();
let err_string = test::format_test_error(js_error);
self.progress(lsp_custom::TestRunProgressMessage::Failed {
test: desc.into(),
messages: as_test_messages(err_string, false),
@@ -846,10 +842,7 @@ impl test::TestReporter for LspTestReporter {
}
test::TestStepResult::Failed(js_error) => {
let messages = if let Some(js_error) = js_error {
let err_string = PrettyJsError::create(*js_error.clone())
.to_string()
.trim_start_matches("Uncaught ")
.to_string();
let err_string = test::format_test_error(js_error);
as_test_messages(err_string, false)
} else {
vec![]
cli/tests/testdata/test/aggregate_error.out (2 additions, 3 deletions)
@@ -7,12 +7,11 @@ failures:
AggregateError
Error: Error 1
at [WILDCARD]/testdata/test/aggregate_error.ts:2:18
[WILDCARD]
Error: Error 2
at [WILDCARD]/testdata/test/aggregate_error.ts:3:18
[WILDCARD]
throw new AggregateError([error1, error2]);
^
at [WILDCARD]/testdata/test/aggregate_error.ts:5:9
at [WILDCARD]

failures:

cli/tests/testdata/test/exit_sanitizer.out (0 additions, 3 deletions)
@@ -12,23 +12,20 @@ AssertionError: Test case attempted to exit with exit code: 0
^
at [WILDCARD]
at [WILDCARD]/test/exit_sanitizer.ts:2:8
at [WILDCARD]

./test/exit_sanitizer.ts > exit(1)
AssertionError: Test case attempted to exit with exit code: 1
Deno.exit(1);
^
at [WILDCARD]
at [WILDCARD]/test/exit_sanitizer.ts:6:8
at [WILDCARD]

./test/exit_sanitizer.ts > exit(2)
AssertionError: Test case attempted to exit with exit code: 2
Deno.exit(2);
^
at [WILDCARD]
at [WILDCARD]/test/exit_sanitizer.ts:10:8
at [WILDCARD]

failures:

cli/tests/testdata/test/fail.out (0 additions, 10 deletions)
@@ -18,70 +18,60 @@ Error
throw new Error();
^
at [WILDCARD]/test/fail.ts:2:9
at [WILDCARD]

./test/fail.ts > test 1
Error
throw new Error();
^
at [WILDCARD]/test/fail.ts:5:9
at [WILDCARD]

./test/fail.ts > test 2
Error
throw new Error();
^
at [WILDCARD]/test/fail.ts:8:9
at [WILDCARD]

./test/fail.ts > test 3
Error
throw new Error();
^
at [WILDCARD]/test/fail.ts:11:9
at [WILDCARD]

./test/fail.ts > test 4
Error
throw new Error();
^
at [WILDCARD]/test/fail.ts:14:9
at [WILDCARD]

./test/fail.ts > test 5
Error
throw new Error();
^
at [WILDCARD]/test/fail.ts:17:9
at [WILDCARD]

./test/fail.ts > test 6
Error
throw new Error();
^
at [WILDCARD]/test/fail.ts:20:9
at [WILDCARD]

./test/fail.ts > test 7
Error
throw new Error();
^
at [WILDCARD]/test/fail.ts:23:9
at [WILDCARD]

./test/fail.ts > test 8
Error
throw new Error();
^
at [WILDCARD]/test/fail.ts:26:9
at [WILDCARD]

./test/fail.ts > test 9
Error
throw new Error();
^
at [WILDCARD]/test/fail.ts:29:9
at [WILDCARD]

failures:

cli/tests/testdata/test/fail_fast.out (0 additions, 1 deletion)
@@ -9,7 +9,6 @@ Error
throw new Error();
^
at [WILDCARD]/test/fail_fast.ts:2:9
at [WILDCARD]

failures:

cli/tests/testdata/test/finally_timeout.out (0 additions, 1 deletion)
@@ -10,7 +10,6 @@ Error: fail
throw new Error("fail");
^
at [WILDCARD]/test/finally_timeout.ts:4:11
at [WILDCARD]

failures:

cli/tests/testdata/test/steps/failing_steps.out (0 additions, 1 deletion)
@@ -50,7 +50,6 @@ Error: Fail test.
throw new Error("Fail test.");
^
at [WILDCARD]/failing_steps.ts:[WILDCARD]
at [WILDCARD]

failures:

cli/tests/testdata/test/steps/invalid_usage.out (7 additions, 17 deletions)
@@ -10,16 +10,14 @@ inner missing await ...
step ...
inner ... pending ([WILDCARD])
Error: Parent scope completed before test step finished execution. Ensure all steps are awaited (ex. `await t.step(...)`).
at postValidation [WILDCARD]
at testStepSanitizer [WILDCARD]
at [WILDCARD]
at async TestContext.step [WILDCARD]
FAILED ([WILDCARD])
Error: There were still test steps running after the current scope finished execution. Ensure all steps are awaited (ex. `await t.step(...)`).
await t.step("step", (t) => {
^
at postValidation [WILDCARD]
at testStepSanitizer [WILDCARD]
at [WILDCARD]
at async fn ([WILDCARD]/invalid_usage.ts:[WILDCARD])
at async Object.testStepSanitizer [WILDCARD]
FAILED ([WILDCARD])
parallel steps with sanitizers ...
step 1 ... pending ([WILDCARD])
@@ -28,10 +26,8 @@ parallel steps with sanitizers ...
* parallel steps with sanitizers > step 1
await t.step("step 2", () => {});
^
at preValidation ([WILDCARD])
at testStepSanitizer ([WILDCARD])
at [WILDCARD]/invalid_usage.ts:[WILDCARD]
at [WILDCARD]
at [WILDCARD]/invalid_usage.ts:[WILDCARD]
FAILED ([WILDCARD])
parallel steps when first has sanitizer ...
step 1 ... pending ([WILDCARD])
@@ -40,10 +36,8 @@ parallel steps when first has sanitizer ...
* parallel steps when first has sanitizer > step 1
await t.step({
^
at preValidation ([WILDCARD])
at testStepSanitizer ([WILDCARD])
at [WILDCARD]/invalid_usage.ts:[WILDCARD]
at [WILDCARD]
at [WILDCARD]/invalid_usage.ts:[WILDCARD]
FAILED ([WILDCARD])
parallel steps when second has sanitizer ...
step 1 ... ok ([WILDCARD])
@@ -52,10 +46,8 @@ parallel steps when second has sanitizer ...
* parallel steps when second has sanitizer > step 1
await t.step({
^
at preValidation ([WILDCARD])
at testStepSanitizer ([WILDCARD])
at [WILDCARD]/invalid_usage.ts:[WILDCARD]
at [WILDCARD]
at [WILDCARD]/invalid_usage.ts:[WILDCARD]
FAILED ([WILDCARD])
parallel steps where only inner tests have sanitizers ...
step 1 ...
@@ -67,8 +59,7 @@ parallel steps where only inner tests have sanitizers ...
* parallel steps where only inner tests have sanitizers > step 1
await t.step({
^
at preValidation ([WILDCARD])
at testStepSanitizer ([WILDCARD])
at [WILDCARD]
at [WILDCARD]/invalid_usage.ts:[WILDCARD]
FAILED ([WILDCARD])
FAILED ([WILDCARD])
@@ -81,7 +72,6 @@ Error: Cannot run test step after parent scope has finished execution. Ensure an
^
at TestContext.step ([WILDCARD])
at [WILDCARD]/invalid_usage.ts:[WILDCARD]
at [WILDCARD]

./test/steps/invalid_usage.ts > top level missing await
Error: There were still test steps running after the current scope finished execution. Ensure all steps are awaited (ex. `await t.step(...)`).
cli/tools/test.rs (61 additions, 9 deletions)
@@ -301,10 +301,7 @@ impl PrettyTestReporter {
);

if let Some(js_error) = result.error() {
let err_string = PrettyJsError::create(js_error.clone())
.to_string()
.trim_start_matches("Uncaught ")
.to_string();
let err_string = format_test_error(js_error);
for line in err_string.lines() {
println!("{}{}", " ".repeat(description.level + 1), line);
}
@@ -464,11 +461,7 @@ impl TestReporter for PrettyTestReporter {
colors::gray(">"),
description.name
);
let err_string = PrettyJsError::create(*js_error.clone())
.to_string()
.trim_start_matches("Uncaught ")
.to_string();
println!("{}", err_string);
println!("{}", format_test_error(js_error));
println!();
}

@@ -525,6 +518,65 @@ impl TestReporter for PrettyTestReporter {
}
}

fn abbreviate_test_error(js_error: &JsError) -> JsError {
  let mut js_error = js_error.clone();
  let frames = std::mem::take(&mut js_error.frames);

  // check if there are any stack frames coming from user code
  let should_filter = frames.iter().any(|f| {
    if let Some(file_name) = &f.file_name {
      !(file_name.starts_with("[deno:") || file_name.starts_with("deno:"))
    } else {
      true
    }
  });

  if should_filter {
    // walk the stack from the outermost frame and drop the trailing
    // internal frames
    let mut frames = frames
      .into_iter()
      .rev()
      .skip_while(|f| {
        if let Some(file_name) = &f.file_name {
          file_name.starts_with("[deno:") || file_name.starts_with("deno:")
        } else {
          false
        }
      })
      .collect::<Vec<_>>();
    frames.reverse();
    js_error.frames = frames;
  } else {
    js_error.frames = frames;
  }

  // apply the same filtering to the error's cause chain and to any errors
  // aggregated in an AggregateError
  js_error.cause = js_error
    .cause
    .as_ref()
    .map(|e| Box::new(abbreviate_test_error(e)));
  js_error.aggregated = js_error
    .aggregated
    .as_ref()
    .map(|es| es.iter().map(abbreviate_test_error).collect());
  js_error
}

// This function maps a JsError to a PrettyJsError and applies some changes
// specifically for test runner purposes:
//
// - filters out stack frames:
//   - if the stack trace consists of mixed user and internal code, the
//     trailing internal frames (those below the last user-code frame) are
//     filtered out
//   - if the stack trace consists only of internal code it is preserved as is
// - strips the "Uncaught " prefix from the exception message
pub fn format_test_error(js_error: &JsError) -> String {
  let mut js_error = abbreviate_test_error(js_error);
  js_error.exception_message = js_error
    .exception_message
    .trim_start_matches("Uncaught ")
    .to_string();
  PrettyJsError::create(js_error).to_string()
}

fn create_reporter(
concurrent: bool,
echo_output: bool,
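
For readers skimming the diff, here is a minimal, standalone sketch of the trailing-frame filtering rule that `abbreviate_test_error` applies. It is illustrative only: plain `Option<String>` file names stand in for deno_core's stack frame type, and the helper name `filter_trailing_internal` plus the example paths are hypothetical, not part of this PR.

```rust
// Illustrative sketch (not part of the PR) of the test runner's frame filter.

// Returns true for frames that originate in Deno's internal runtime code.
fn is_internal(file_name: &Option<String>) -> bool {
  match file_name {
    Some(name) => name.starts_with("[deno:") || name.starts_with("deno:"),
    None => false,
  }
}

// Drops the internal frames below the last user-code frame. If every frame
// is internal, the stack is returned untouched so the trace stays useful.
fn filter_trailing_internal(frames: Vec<Option<String>>) -> Vec<Option<String>> {
  let has_user_code = frames.iter().any(|f| !is_internal(f));
  if !has_user_code {
    return frames;
  }
  let mut kept: Vec<_> = frames.into_iter().rev().skip_while(is_internal).collect();
  kept.reverse();
  kept
}

fn main() {
  let frames = vec![
    Some("file:///test/fail.ts".to_string()),
    Some("deno:runtime/js/40_testing.js".to_string()),
    Some("deno:core/01_core.js".to_string()),
  ];
  // Prints only the user frame; the two trailing "deno:" frames are dropped,
  // which is why the extra `at [WILDCARD]` lines disappear from the .out
  // expectation files above.
  for frame in filter_trailing_internal(frames) {
    println!("at {}", frame.unwrap_or_default());
  }
}
```

Because only trailing frames are removed, internal frames that sit between two user-code frames are kept, so async call chains that re-enter user code remain visible in the reported trace.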