cherry-pick(#28360): Revert "chore(test runner): remove fake skipped test results (#27762)" (#28368)

This reverts commit 210168e36d.

Fixes #28321.
This commit is contained in:
Dmitry Gozman 2023-11-27 19:01:48 -08:00 committed by GitHub
parent 18478e325f
commit 43798aff92
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
9 changed files with 72 additions and 111 deletions

View File

@ -239,9 +239,6 @@ export class TestCase extends Base implements reporterTypes.TestCase {
_poolDigest = '';
_workerHash = '';
_projectId = '';
// This is different from |results.length| because sometimes we do not run the test, but consume
// an attempt, for example when skipping tests in a serial suite after a failure.
_runAttempts = 0;
// Annotations known statically before running the test, e.g. `test.skip()` or `test.describe.skip()`.
_staticAnnotations: Annotation[] = [];
@ -259,10 +256,16 @@ export class TestCase extends Base implements reporterTypes.TestCase {
}
outcome(): 'skipped' | 'expected' | 'unexpected' | 'flaky' {
const results = this.results.filter(result => result.status !== 'interrupted');
if (results.every(result => result.status === 'skipped'))
// Ignore initial skips that may be a result of "skipped because previous test in serial mode failed".
const results = [...this.results];
while (results[0]?.status === 'skipped' || results[0]?.status === 'interrupted')
results.shift();
// All runs were skipped.
if (!results.length)
return 'skipped';
const failures = results.filter(result => result.status !== this.expectedStatus);
const failures = results.filter(result => result.status !== 'skipped' && result.status !== 'interrupted' && result.status !== this.expectedStatus);
if (!failures.length) // all passed
return 'expected';
if (failures.length === results.length) // all failed

View File

@ -492,10 +492,16 @@ export class TeleTestCase implements reporterTypes.TestCase {
}
outcome(): 'skipped' | 'expected' | 'unexpected' | 'flaky' {
const results = this.results.filter(result => result.status !== 'interrupted');
if (results.every(result => result.status === 'skipped'))
// Ignore initial skips that may be a result of "skipped because previous test in serial mode failed".
const results = [...this.results];
while (results[0]?.status === 'skipped' || results[0]?.status === 'interrupted')
results.shift();
// All runs were skipped.
if (!results.length)
return 'skipped';
const failures = results.filter(result => result.status !== this.expectedStatus);
const failures = results.filter(result => result.status !== 'skipped' && result.status !== 'interrupted' && result.status !== this.expectedStatus);
if (!failures.length) // all passed
return 'expected';
if (failures.length === results.length) // all failed

View File

@ -290,7 +290,7 @@ class JobDispatcher {
test.expectedStatus = params.expectedStatus;
test.annotations = params.annotations;
test.timeout = params.timeout;
const isFailure = result.status !== test.expectedStatus;
const isFailure = result.status !== 'skipped' && result.status !== test.expectedStatus;
if (isFailure)
this._failedTests.add(test);
this._reportTestEnd(test, result);
@ -369,17 +369,22 @@ class JobDispatcher {
}
result.errors = [...errors];
result.error = result.errors[0];
result.status = 'failed';
result.status = errors.length ? 'failed' : 'skipped';
this._reportTestEnd(test, result);
this._failedTests.add(test);
}
private _handleFatalErrors(errors: TestError[]) {
const test = this._remainingByTestId.values().next().value as TestCase | undefined;
if (test) {
this._failTestWithErrors(test, errors);
private _massSkipTestsFromRemaining(testIds: Set<string>, errors: TestError[]) {
for (const test of this._remainingByTestId.values()) {
if (!testIds.has(test.id))
continue;
if (!this._failureTracker.hasReachedMaxFailures()) {
this._failTestWithErrors(test, errors);
errors = []; // Only report errors for the first test.
}
this._remainingByTestId.delete(test.id);
} else if (errors.length) {
}
if (errors.length) {
// We had fatal errors after all tests have passed - most likely in some teardown.
// Let's just fail the test run.
this._failureTracker.onWorkerError();
@ -407,28 +412,23 @@ class JobDispatcher {
}
if (params.fatalErrors.length) {
// In case of fatal errors, report the first remaining test as failing with these errors,
// and "skip" all other tests to avoid running into the same issue over and over.
this._handleFatalErrors(params.fatalErrors);
this._remainingByTestId.clear();
// In case of fatal errors, report the first remaining test as failing with these errors,
// and all others as skipped.
this._massSkipTestsFromRemaining(new Set(this._remainingByTestId.keys()), params.fatalErrors);
}
// Handle tests that should be skipped because of the setup failure.
for (const testId of params.skipTestsDueToSetupFailure)
this._remainingByTestId.delete(testId);
this._massSkipTestsFromRemaining(new Set(params.skipTestsDueToSetupFailure), []);
if (params.unexpectedExitError) {
if (this._currentlyRunning) {
// When worker exits during a test, we blame the test itself.
this._failTestWithErrors(this._currentlyRunning.test, [params.unexpectedExitError]);
this._remainingByTestId.delete(this._currentlyRunning.test.id);
} else {
// The most common situation when worker exits while not running a test is:
// worker failed to require the test file (at the start) because of an exception in one of imports.
// In this case, "skip" all remaining tests, to avoid running into the same exception over and over.
this._handleFatalErrors([params.unexpectedExitError]);
this._remainingByTestId.clear();
}
// When worker exits during a test, we blame the test itself.
//
// The most common situation when worker exits while not running a test is:
// worker failed to require the test file (at the start) because of an exception in one of imports.
// In this case, "skip" all remaining tests, to avoid running into the same exception over and over.
if (this._currentlyRunning)
this._massSkipTestsFromRemaining(new Set([this._currentlyRunning.test.id]), [params.unexpectedExitError]);
else
this._massSkipTestsFromRemaining(new Set(this._remainingByTestId.keys()), [params.unexpectedExitError]);
}
const retryCandidates = new Set<TestCase>();
@ -446,22 +446,26 @@ class JobDispatcher {
serialSuitesWithFailures.add(outermostSerialSuite);
}
// If we have failed tests that belong to a serial suite,
// we should skip all future tests from the same serial suite.
const testsBelongingToSomeSerialSuiteWithFailures = [...this._remainingByTestId.values()].filter(test => {
let parent: Suite | undefined = test.parent;
while (parent && !serialSuitesWithFailures.has(parent))
parent = parent.parent;
return !!parent;
});
this._massSkipTestsFromRemaining(new Set(testsBelongingToSomeSerialSuiteWithFailures.map(test => test.id)), []);
for (const serialSuite of serialSuitesWithFailures) {
serialSuite.allTests().forEach(test => {
// Skip remaining tests from serial suites with failures.
this._remainingByTestId.delete(test.id);
// Schedule them for the retry all together.
retryCandidates.add(test);
});
// Add all tests from failed serial suites for possible retry.
// These will only be retried together, because they have the same
// "retries" setting and the same number of previous runs.
serialSuite.allTests().forEach(test => retryCandidates.add(test));
}
for (const test of this._job.tests) {
if (!this._remainingByTestId.has(test.id))
++test._runAttempts;
}
const remaining = [...this._remainingByTestId.values()];
for (const test of retryCandidates) {
if (test._runAttempts < test.retries + 1)
if (test.results.length < test.retries + 1)
remaining.push(test);
}

View File

@ -31,7 +31,7 @@ export class FailureTracker {
}
onTestEnd(test: TestCase, result: TestResult) {
if (result.status !== test.expectedStatus)
if (result.status !== 'skipped' && result.status !== test.expectedStatus)
++this._failureCount;
}

View File

@ -399,7 +399,7 @@ test('beforeAll failure should prevent the test, but not afterAll', async ({ run
});
expect(result.exitCode).toBe(1);
expect(result.failed).toBe(1);
expect(result.didNotRun).toBe(1);
expect(result.skipped).toBe(1);
expect(result.outputLines).toEqual([
'beforeAll',
'afterAll',
@ -499,7 +499,7 @@ test('beforeAll timeout should be reported and prevent more tests', async ({ run
}, { timeout: 1000 });
expect(result.exitCode).toBe(1);
expect(result.failed).toBe(1);
expect(result.didNotRun).toBe(1);
expect(result.skipped).toBe(1);
expect(result.outputLines).toEqual([
'beforeAll',
'afterAll',
@ -688,7 +688,7 @@ test('unhandled rejection during beforeAll should be reported and prevent more t
});
expect(result.exitCode).toBe(1);
expect(result.failed).toBe(1);
expect(result.didNotRun).toBe(1);
expect(result.skipped).toBe(1);
expect(result.outputLines).toEqual([
'beforeAll',
'afterAll',
@ -801,7 +801,7 @@ test('beforeAll failure should only prevent tests that are affected', async ({ r
});
expect(result.exitCode).toBe(1);
expect(result.failed).toBe(1);
expect(result.didNotRun).toBe(1);
expect(result.skipped).toBe(1);
expect(result.passed).toBe(1);
expect(result.outputLines).toEqual([
'beforeAll',

View File

@ -216,8 +216,8 @@ test('should retry beforeAll failure', async ({ runInlineTest }) => {
expect(result.exitCode).toBe(1);
expect(result.passed).toBe(0);
expect(result.failed).toBe(1);
expect(result.didNotRun).toBe(1);
expect(result.output.split('\n')[2]).toBe('××F');
expect(result.skipped).toBe(1);
expect(result.output.split('\n')[2]).toBe('×°×°F°');
expect(result.output).toContain('BeforeAll is bugged!');
});

View File

@ -111,7 +111,7 @@ test('should report subprocess creation error', async ({ runInlineTest }, testIn
expect(result.exitCode).toBe(1);
expect(result.passed).toBe(0);
expect(result.failed).toBe(1);
expect(result.didNotRun).toBe(1);
expect(result.skipped).toBe(1);
expect(result.output).toContain('Error: worker process exited unexpectedly (code=42, signal=null)');
});
@ -634,9 +634,9 @@ test('should not hang on worker error in test file', async ({ runInlineTest }) =
`,
}, { 'timeout': 3000 });
expect(result.exitCode).toBe(1);
expect(result.results).toHaveLength(1);
expect(result.results[0].status).toBe('failed');
expect(result.results[0].error.message).toContain('Error: worker process exited unexpectedly');
expect(result.results[1].status).toBe('skipped');
});
test('fast double SIGINT should be ignored', async ({ interactWithTestRunner }) => {

View File

@ -640,7 +640,7 @@ test('static modifiers should be added in serial mode', async ({ runInlineTest }
});
expect(result.exitCode).toBe(1);
expect(result.passed).toBe(0);
expect(result.didNotRun).toBe(3);
expect(result.skipped).toBe(3);
expect(result.report.suites[0].specs[0].tests[0].annotations).toEqual([{ type: 'slow' }]);
expect(result.report.suites[0].specs[1].tests[0].annotations).toEqual([{ type: 'fixme' }]);
expect(result.report.suites[0].specs[2].tests[0].annotations).toEqual([{ type: 'skip' }]);

View File

@ -47,7 +47,7 @@ test('test.describe.serial should work', async ({ runInlineTest }) => {
expect(result.exitCode).toBe(1);
expect(result.passed).toBe(2);
expect(result.failed).toBe(1);
expect(result.didNotRun).toBe(2);
expect(result.skipped).toBe(2);
expect(result.outputLines).toEqual([
'test1',
'test2',
@ -87,7 +87,7 @@ test('test.describe.serial should work in describe', async ({ runInlineTest }) =
expect(result.exitCode).toBe(1);
expect(result.passed).toBe(2);
expect(result.failed).toBe(1);
expect(result.didNotRun).toBe(2);
expect(result.skipped).toBe(2);
expect(result.outputLines).toEqual([
'test1',
'test2',
@ -128,7 +128,7 @@ test('test.describe.serial should work with retry', async ({ runInlineTest }) =>
expect(result.passed).toBe(2);
expect(result.flaky).toBe(1);
expect(result.failed).toBe(1);
expect(result.didNotRun).toBe(1);
expect(result.skipped).toBe(1);
expect(result.outputLines).toEqual([
'test1',
'test2',
@ -272,7 +272,7 @@ test('test.describe.serial should work with test.fail', async ({ runInlineTest }
expect(result.exitCode).toBe(1);
expect(result.passed).toBe(2);
expect(result.failed).toBe(1);
expect(result.didNotRun).toBe(1);
expect(result.skipped).toBe(1);
expect(result.outputLines).toEqual([
'zero',
'one',
@ -394,55 +394,3 @@ test('test.describe.serial should work with fullyParallel', async ({ runInlineTe
'two',
]);
});
// Regression test for serial-mode retry accounting: in a serial suite with
// retries, a passing test followed by a failing one causes the rest of the
// suite to be skipped, and the whole suite is retried together. The final
// report must show exactly one flaky test, one failed test, and one test that
// did not run — with no tests counted as passed or skipped.
test('serial fail + skip is failed', async ({ runInlineTest }) => {
  const result = await runInlineTest({
    'a.test.ts': `
import { test, expect } from '@playwright/test';
test.describe.configure({ mode: 'serial', retries: 1 });
test.describe.serial('serial suite', () => {
test('one', async () => {
expect(test.info().retry).toBe(0);
});
test('two', async () => {
expect(1).toBe(2);
});
test('three', async () => {
});
});
`,
  }, { workers: 1 });
  expect(result.exitCode).toBe(1);
  expect(result.passed).toBe(0);
  // Skipped attempts consumed by the serial retry must not surface as
  // 'skipped' outcomes in the summary.
  expect(result.skipped).toBe(0);
  expect(result.flaky).toBe(1);
  expect(result.failed).toBe(1);
  expect(result.interrupted).toBe(0);
  expect(result.didNotRun).toBe(1);
});
// Mirror of the previous scenario with the skip occurring before the failure:
// test 'one' asserts it runs on the retry attempt (retry === 1), so it fails
// on the first pass, which skips the rest of the serial suite and triggers a
// full-suite retry. The summary must still be one flaky, one failed, one
// didNotRun — skipped retry attempts must not be reported as 'skipped'.
test('serial skip + fail is failed', async ({ runInlineTest }) => {
  const result = await runInlineTest({
    'a.test.ts': `
import { test, expect } from '@playwright/test';
test.describe.configure({ mode: 'serial', retries: 1 });
test.describe.serial('serial suite', () => {
test('one', async () => {
expect(test.info().retry).toBe(1);
});
test('two', async () => {
expect(1).toBe(2);
});
test('three', async () => {
});
});
`,
  }, { workers: 1 });
  expect(result.exitCode).toBe(1);
  expect(result.passed).toBe(0);
  // NOTE(review): same accounting contract as 'serial fail + skip is failed'
  // — skipped serial attempts are invisible in the top-level counters.
  expect(result.skipped).toBe(0);
  expect(result.flaky).toBe(1);
  expect(result.failed).toBe(1);
  expect(result.interrupted).toBe(0);
  expect(result.didNotRun).toBe(1);
});