Merge pull request #19183 from atom/ns/upload-windows-crashes-on-release-branches

On Azure DevOps, upload Windows crash dumps to S3 on release branches
This commit is contained in:
Nathan Sobo 2019-04-19 13:52:47 -06:00 committed by GitHub
commit 406e033323
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 52 additions and 2 deletions

View File

@ -4,7 +4,7 @@ const fs = require('fs')
const path = require('path')
const aws = require('aws-sdk')
module.exports = function (s3Key, s3Secret, s3Bucket, directory, assets) {
module.exports = function (s3Key, s3Secret, s3Bucket, directory, assets, acl = 'public-read') {
const s3 = new aws.S3({
accessKeyId: s3Key,
secretAccessKey: s3Secret,
@ -37,7 +37,7 @@ module.exports = function (s3Key, s3Secret, s3Bucket, directory, assets) {
console.info(`Uploading ${assetPath}`)
const params = {
Key: `${directory}${path.basename(assetPath)}`,
ACL: 'public-read',
ACL: acl,
Body: fs.createReadStream(assetPath)
}

View File

@ -116,6 +116,17 @@ jobs:
displayName: Publish crash reports on non-release branch
condition: and(failed(), eq(variables['ATOM_RELEASES_S3_KEY'], ''))
- script: >
node $(Build.SourcesDirectory)\script\vsts\upload-crash-reports.js --crash-report-path "%ARTIFACT_STAGING_DIR%\crash-reports" --s3-path "vsts-artifacts/%BUILD_ID%/"
env:
ATOM_RELEASES_S3_KEY: $(ATOM_RELEASES_S3_KEY)
ATOM_RELEASES_S3_SECRET: $(ATOM_RELEASES_S3_SECRET)
ATOM_RELEASES_S3_BUCKET: $(ATOM_RELEASES_S3_BUCKET)
ARTIFACT_STAGING_DIR: $(Build.ArtifactStagingDirectory)
BUILD_ID: $(Build.BuildId)
displayName: Upload crash reports to S3 on release branch
condition: and(failed(), ne(variables['ATOM_RELEASES_S3_KEY'], ''))
- task: PublishBuildArtifacts@1
inputs:
PathtoPublish: $(Build.SourcesDirectory)/out/atom-x64-windows.zip

View File

@ -0,0 +1,39 @@
'use strict'

const glob = require('glob')
const uploadToS3 = require('./lib/upload-to-s3')
const yargs = require('yargs')

// Build the CLI parser first, then materialize the parsed arguments.
// Both options are plain string flags; yargs exposes them on `argv`
// in camelCase (crashReportPath, s3Path).
const parser = yargs
  .usage('Usage: $0 [options]')
  .help('help')
  .describe('crash-report-path', 'The local path of a directory containing crash reports to upload')
  .describe('s3-path', 'Indicates the S3 path in which the crash reports should be uploaded')
  .wrap(yargs.terminalWidth())

const argv = parser.argv
// Find every Windows minidump (*.dmp) under --crash-report-path and push
// the batch to S3 under --s3-path. No-op when the directory holds no dumps.
// Credentials come from the ATOM_RELEASES_S3_* environment variables.
async function uploadCrashReports () {
  const reportDirectory = argv.crashReportPath
  const s3Directory = argv.s3Path

  // glob's `root` option anchors the leading '/' of the pattern at
  // reportDirectory, so this matches dumps directly inside it.
  const dumpFiles = glob.sync('/*.dmp', { root: reportDirectory })
  if (!dumpFiles || dumpFiles.length === 0) return

  console.log(`Uploading ${dumpFiles.length} private crash reports to S3 under '${s3Directory}'`)
  // Crash dumps can contain user data, hence the 'private' ACL.
  await uploadToS3(
    process.env.ATOM_RELEASES_S3_KEY,
    process.env.ATOM_RELEASES_S3_SECRET,
    process.env.ATOM_RELEASES_S3_BUCKET,
    s3Directory,
    dumpFiles,
    'private'
  )
}
// Node.js does not allow top-level await in scripts, so start the async
// work here and turn any rejection into a non-zero exit code.
uploadCrashReports().catch((error) => {
  console.error('An error occurred while uploading crash reports:\n\n', error)
  process.exit(1)
})