Mirror of https://github.com/zed-industries/zed.git (synced 2024-12-29 13:03:27 +03:00)
Merge pull request #2376 from zed-industries/randomized-tests-runner
Add an Actions workflow that repeatedly runs the randomized integration tests
Commit 516964280b

.github/workflows/randomized_tests.yml (vendored, new file, 43 lines)
@@ -0,0 +1,43 @@
name: Randomized Tests

concurrency: randomized-tests

on:
  push:
    branches:
      - randomized-tests-runner
  schedule:
    - cron: '0 * * * *'

env:
  CARGO_TERM_COLOR: always
  CARGO_INCREMENTAL: 0
  RUST_BACKTRACE: 1
  ZED_SERVER_URL: https://zed.dev
  ZED_CLIENT_SECRET_TOKEN: ${{ secrets.ZED_CLIENT_SECRET_TOKEN }}

jobs:
  tests:
    name: Run randomized tests
    runs-on:
      - self-hosted
      - randomized-tests
    steps:
      - name: Install Rust
        run: |
          rustup set profile minimal
          rustup update stable

      - name: Install Node
        uses: actions/setup-node@v2
        with:
          node-version: '16'

      - name: Checkout repo
        uses: actions/checkout@v2
        with:
          clean: false
          submodules: 'recursive'

      - name: Run randomized tests
        run: script/randomized-test-ci
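The workflow itself only shells out to script/randomized-test-ci (added below). As a minimal sketch of re-running one seed locally, assuming the test harness honors the same SEED / ITERATIONS / OPERATIONS environment variables that the scripts below pass through to cargo test, and with the cargo arguments copied from CARGO_TEST_ARGS in script/randomized-test-minimize (the seed value is just an example):

// Hypothetical local repro helper (not part of this commit); it drives the
// same `cargo test` invocation as the CI scripts, pinned to a single seed.
const {spawnSync} = require('child_process')

spawnSync('cargo', ['test', '--release', '--lib', '--package', 'collab', 'random_collaboration'], {
  stdio: 'inherit',
  env: {...process.env, SEED: '12345', ITERATIONS: '1', OPERATIONS: '200'},
})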
@@ -76,7 +76,7 @@ pub fn run_test(
         let seed = atomic_seed.load(SeqCst);
 
         if is_randomized {
-            dbg!(seed);
+            eprintln!("seed = {seed}");
         }
 
         let deterministic = executor::Deterministic::new(seed);
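The `seed = N` line above is aimed at a human reading the CI log; the scripts below locate a failing seed by scanning the captured cargo test output for a `failing seed: N` line, which the harness presumably prints when an iteration fails (see FAILING_SEED_REGEX in script/randomized-test-minimize). A minimal sketch of that extraction step, mirroring runTests() further below:

// Sketch of the seed-extraction step assumed by the CI scripts.
const FAILING_SEED_REGEX = /failing seed: (\d+)/ig

function extractFailingSeed(output) {
  FAILING_SEED_REGEX.lastIndex = 0 // the regex is global, so reset before reuse
  const match = FAILING_SEED_REGEX.exec(output)
  return match ? match[1] : null
}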
script/randomized-test-ci (new executable file, 63 lines)
@@ -0,0 +1,63 @@
#!/usr/bin/env node --redirect-warnings=/dev/null

const fs = require('fs')
const {randomBytes} = require('crypto')
const {execFileSync} = require('child_process')
const {minimizeTestPlan, buildTests, runTests} = require('./randomized-test-minimize');

const {ZED_SERVER_URL, ZED_CLIENT_SECRET_TOKEN} = process.env
if (!ZED_SERVER_URL) throw new Error('Missing env var `ZED_SERVER_URL`')
if (!ZED_CLIENT_SECRET_TOKEN) throw new Error('Missing env var `ZED_CLIENT_SECRET_TOKEN`')

main()

async function main() {
  buildTests()

  const seed = randomU64();
  const commit = execFileSync(
    'git',
    ['rev-parse', 'HEAD'],
    {encoding: 'utf8'}
  ).trim()

  console.log("commit:", commit)
  console.log("starting seed:", seed)

  const planPath = 'target/test-plan.json'
  const minPlanPath = 'target/test-plan.min.json'
  const failingSeed = runTests({
    SEED: seed,
    SAVE_PLAN: planPath,
    ITERATIONS: 50000,
    OPERATIONS: 200,
  })

  if (!failingSeed) {
    console.log("tests passed")
    return
  }

  console.log("found failure at seed", failingSeed)
  const minimizedSeed = minimizeTestPlan(planPath, minPlanPath)
  const minimizedPlan = JSON.parse(fs.readFileSync(minPlanPath, 'utf8'))

  const url = `${ZED_SERVER_URL}/api/randomized_test_failure`
  const body = {
    seed: minimizedSeed,
    token: ZED_CLIENT_SECRET_TOKEN,
    plan: minimizedPlan,
    commit: commit,
  }
  await fetch(url, {
    method: 'POST',
    headers: {"Content-Type": "application/json"},
    body: JSON.stringify(body)
  })
}
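One environment note on the `fetch` call above: a global `fetch` ships by default only in Node 18 and later, while the workflow installs Node 16, so whether it works depends on which node the self-hosted runner actually resolves. A sketch of the same POST using the built-in `https` module, with the URL and body shape taken from the script above, in case a fallback is needed:

// Hedged alternative to global fetch for older Node versions.
const https = require('https')

function postJson(url, body) {
  return new Promise((resolve, reject) => {
    const data = JSON.stringify(body)
    const request = https.request(url, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'Content-Length': Buffer.byteLength(data),
      },
    }, response => {
      response.resume() // drain the response so the socket can close
      response.on('end', () => resolve(response.statusCode))
    })
    request.on('error', reject)
    request.end(data)
  })
}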

function randomU64() {
  const bytes = randomBytes(8)
  const hexString = bytes.reduce(((string, byte) => string + byte.toString(16)), '')
  return BigInt('0x' + hexString).toString(10)
}
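A note on `randomU64` above: `byte.toString(16)` is not zero-padded, so bytes below 0x10 contribute a single hex digit and the resulting seed is not drawn uniformly from the full u64 range (any value still works as a seed, it is just skewed and sometimes shorter). A sketch of an equivalent helper without that quirk, assuming Node 12+ for `Buffer.readBigUInt64BE`:

// Hypothetical alternative (not part of this commit): read the 8 random
// bytes directly as an unsigned 64-bit big-endian integer.
const {randomBytes} = require('crypto')

function randomU64() {
  return randomBytes(8).readBigUInt64BE().toString(10)
}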
script/randomized-test-minimize (new executable file, 132 lines)
@@ -0,0 +1,132 @@
#!/usr/bin/env node --redirect-warnings=/dev/null

const fs = require('fs')
const path = require('path')
const {spawnSync} = require('child_process')

const FAILING_SEED_REGEX = /failing seed: (\d+)/ig
const CARGO_TEST_ARGS = [
  '--release',
  '--lib',
  '--package', 'collab',
  'random_collaboration',
]

if (require.main === module) {
  if (process.argv.length < 4) {
    process.stderr.write("usage: script/randomized-test-minimize <input-plan> <output-plan> [start-index]\n")
    process.exit(1)
  }

  minimizeTestPlan(
    process.argv[2],
    process.argv[3],
    parseInt(process.argv[4]) || 0
  );
}

function minimizeTestPlan(
  inputPlanPath,
  outputPlanPath,
  startIndex = 0
) {
  const tempPlanPath = inputPlanPath + '.try'

  fs.copyFileSync(inputPlanPath, outputPlanPath)
  let testPlan = JSON.parse(fs.readFileSync(outputPlanPath, 'utf8'))

  process.stderr.write("minimizing failing test plan...\n")
  for (let ix = startIndex; ix < testPlan.length; ix++) {
    // Skip 'MutateClients' entries, since they themselves are not single operations.
    if (testPlan[ix].MutateClients) {
      continue
    }

    // Remove a row from the test plan
    const newTestPlan = testPlan.slice()
    newTestPlan.splice(ix, 1)
    fs.writeFileSync(tempPlanPath, serializeTestPlan(newTestPlan), 'utf8');

    process.stderr.write(`${ix}/${testPlan.length}: ${JSON.stringify(testPlan[ix])}`)
    const failingSeed = runTests({
      SEED: '0',
      LOAD_PLAN: tempPlanPath,
      SAVE_PLAN: tempPlanPath,
      ITERATIONS: '500'
    })

    // If the test failed, keep the test plan with the removed row. Reload the test
    // plan from the JSON file, since the test itself will remove any operations
    // which are no longer valid before saving the test plan.
    if (failingSeed != null) {
      process.stderr.write(` - remove. failing seed: ${failingSeed}.\n`)
      fs.copyFileSync(tempPlanPath, outputPlanPath)
      testPlan = JSON.parse(fs.readFileSync(outputPlanPath, 'utf8'))
      ix--
    } else {
      process.stderr.write(` - keep.\n`)
    }
  }

  fs.unlinkSync(tempPlanPath)

  // Re-run the final minimized plan to get the correct failing seed.
  // This is a workaround for the fact that the execution order can
  // slightly change when replaying a test plan after it has been
  // saved and loaded.
  const failingSeed = runTests({
    SEED: '0',
    ITERATIONS: '5000',
    LOAD_PLAN: outputPlanPath,
  })

  process.stderr.write(`final test plan: ${outputPlanPath}\n`)
  process.stderr.write(`final seed: ${failingSeed}\n`)
  return failingSeed
}

function buildTests() {
  const {status} = spawnSync('cargo', ['test', '--no-run', ...CARGO_TEST_ARGS], {
    stdio: 'inherit',
    encoding: 'utf8',
    env: {
      ...process.env,
    }
  });
  if (status !== 0) {
    throw new Error('build failed')
  }
}

function runTests(env) {
  const {status, stdout} = spawnSync('cargo', ['test', ...CARGO_TEST_ARGS], {
    stdio: 'pipe',
    encoding: 'utf8',
    env: {
      ...process.env,
      ...env,
    }
  });

  if (status !== 0) {
    FAILING_SEED_REGEX.lastIndex = 0
    const match = FAILING_SEED_REGEX.exec(stdout)
    if (!match) {
      process.stderr.write("test failed, but no failing seed found:\n")
      process.stderr.write(stdout)
      process.stderr.write('\n')
      process.exit(1)
    }
    return match[1]
  } else {
    return null
  }
}

function serializeTestPlan(plan) {
  return "[\n" + plan.map(row => JSON.stringify(row)).join(",\n") + "\n]\n"
}

exports.buildTests = buildTests
exports.runTests = runTests
exports.minimizeTestPlan = minimizeTestPlan
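The exports at the bottom are what script/randomized-test-ci consumes; the script can also be invoked directly as `script/randomized-test-minimize <input-plan> <output-plan> [start-index]`. A minimal sketch of programmatic use, with the plan paths borrowed from script/randomized-test-ci:

// Hypothetical driver script reusing the exports above (mirrors the flow
// of script/randomized-test-ci): build once, run, then minimize on failure.
const {buildTests, runTests, minimizeTestPlan} = require('./randomized-test-minimize')

buildTests()
const failingSeed = runTests({
  SEED: '1',
  SAVE_PLAN: 'target/test-plan.json',
  ITERATIONS: '500',
  OPERATIONS: '200',
})
if (failingSeed != null) {
  const minimizedSeed = minimizeTestPlan('target/test-plan.json', 'target/test-plan.min.json')
  console.log('minimized failing seed:', minimizedSeed)
}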