Removed old update-server and vsts

This commit is contained in:
Maurício Szabo 2022-11-03 21:27:55 -03:00
parent e2baad9a61
commit f6f4069523
31 changed files with 0 additions and 8991 deletions

View File

@ -1,61 +0,0 @@
# Atom Update Test Server
This folder contains a simple implementation of Atom's update server to be used for testing the update process with local builds.
## Prerequisites
On macOS, you will need to configure a "Mac Development" certificate for your local machine so that the `script/build --test-sign` parameter will work. Here are the steps to set one up:
1. Install Xcode if it isn't already
1. Launch Xcode and open the Preferences dialog (<kbd>Cmd + ,</kbd>)
1. Switch to the Accounts tab
1. If you don't already see your Apple account in the leftmost column, click the `+` button at the bottom left of the window, select "Apple ID" and then click Continue. Sign in with your Apple account and then you'll be sent back to the Accounts tab.
1. Click the "Manage Certificates..." button in the lower right of the Accounts page
1. Click the `+` button in the lower left of the Signing Certificates popup and then select "Mac Development"
1. A new certificate should now be in the list of the Signing Certificates window with the name of your macOS machine. Click "Done"
1. In a Terminal, verify that your Mac Development certificate is set up by running
```
security find-certificate -c 'Mac Developer'
```
If it returns a lot of information with "Mac Developer: your@apple-id-email.com" inside of it, your certificate is configured correctly and you're now ready to run an Atom build with the `--test-sign` parameter.
## How to use it
1. Since you probably want to try upgrading an installed Atom release to a newer version, start your shell and set the `ATOM_RELEASE_VERSION` environment variable to the version that you want the server to advertise as the latest version:
**Windows**
```
set ATOM_RELEASE_VERSION="1.32.0-beta1"
```
**macOS**
```
export ATOM_RELEASE_VERSION="1.32.0-beta1"
```
2. Run a full build of Atom such that the necessary release artifacts are in the `out` folder:
**Windows**
```
script/build --create-windows-installer
```
**macOS**
```
script/build --compress-artifacts --test-sign
```
3. Start up the server in this folder:
```
npm install
npm start
```
**NOTE:** You can customize the port by setting the `PORT` environment variable.
4. Start Atom from the command line with the `ATOM_UPDATE_URL_PREFIX` environment variable set to `http://localhost:3456` (change this to reflect any `PORT` override you might have used).
5. Open the About page and try to update Atom. The update server will write output to the console when requests are received. If you want to check what the server is advertising without going through Atom, see the snippet below.
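As an optional sanity check, a short Node.js script like the one below asks the server directly what it is advertising. This is only a sketch: the `/api/updates` route and the `version` query parameter come from `run-server.js` in this folder (the macOS route), and the version value is just an example.
```
// Optional sanity check: ask the test update server what update it offers.
// Assumes the server in this folder is running; adjust the version as needed.
const http = require('http');

const port = process.env.PORT || 3456;
const installedVersion = '1.31.0'; // example installed version

http
  .get(`http://localhost:${port}/api/updates?version=${installedVersion}`, res => {
    if (res.statusCode === 204) {
      console.log('Server reports this version is already up to date');
      return;
    }
    let body = '';
    res.on('data', chunk => (body += chunk));
    res.on('end', () => console.log('Update offered:', JSON.parse(body)));
  })
  .on('error', err => console.error('Is the update server running?', err.message));
```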

View File

@ -1,378 +0,0 @@
{
"name": "atom-test-update-server",
"version": "0.1.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"accepts": {
"version": "1.3.5",
"resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.5.tgz",
"integrity": "sha1-63d99gEXI6OxTopywIBcjoZ0a9I=",
"requires": {
"mime-types": "~2.1.18",
"negotiator": "0.6.1"
}
},
"array-flatten": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
"integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI="
},
"body-parser": {
"version": "1.18.2",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.18.2.tgz",
"integrity": "sha1-h2eKGdhLR9hZuDGZvVm84iKxBFQ=",
"requires": {
"bytes": "3.0.0",
"content-type": "~1.0.4",
"debug": "2.6.9",
"depd": "~1.1.1",
"http-errors": "~1.6.2",
"iconv-lite": "0.4.19",
"on-finished": "~2.3.0",
"qs": "6.5.1",
"raw-body": "2.3.2",
"type-is": "~1.6.15"
}
},
"bytes": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz",
"integrity": "sha1-0ygVQE1olpn4Wk6k+odV3ROpYEg="
},
"colors": {
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/colors/-/colors-1.3.2.tgz",
"integrity": "sha512-rhP0JSBGYvpcNQj4s5AdShMeE5ahMop96cTeDl/v9qQQm2fYClE2QXZRi8wLzc+GmXSxdIqqbOIAhyObEXDbfQ=="
},
"content-disposition": {
"version": "0.5.2",
"resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.2.tgz",
"integrity": "sha1-DPaLud318r55YcOoUXjLhdunjLQ="
},
"content-type": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz",
"integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA=="
},
"cookie": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.3.1.tgz",
"integrity": "sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s="
},
"cookie-signature": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz",
"integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw="
},
"debug": {
"version": "2.6.9",
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
"requires": {
"ms": "2.0.0"
}
},
"depd": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz",
"integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak="
},
"destroy": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz",
"integrity": "sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA="
},
"ee-first": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz",
"integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0="
},
"encodeurl": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz",
"integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k="
},
"escape-html": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz",
"integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg="
},
"etag": {
"version": "1.8.1",
"resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz",
"integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc="
},
"express": {
"version": "4.16.3",
"resolved": "http://registry.npmjs.org/express/-/express-4.16.3.tgz",
"integrity": "sha1-avilAjUNsyRuzEvs9rWjTSL37VM=",
"requires": {
"accepts": "~1.3.5",
"array-flatten": "1.1.1",
"body-parser": "1.18.2",
"content-disposition": "0.5.2",
"content-type": "~1.0.4",
"cookie": "0.3.1",
"cookie-signature": "1.0.6",
"debug": "2.6.9",
"depd": "~1.1.2",
"encodeurl": "~1.0.2",
"escape-html": "~1.0.3",
"etag": "~1.8.1",
"finalhandler": "1.1.1",
"fresh": "0.5.2",
"merge-descriptors": "1.0.1",
"methods": "~1.1.2",
"on-finished": "~2.3.0",
"parseurl": "~1.3.2",
"path-to-regexp": "0.1.7",
"proxy-addr": "~2.0.3",
"qs": "6.5.1",
"range-parser": "~1.2.0",
"safe-buffer": "5.1.1",
"send": "0.16.2",
"serve-static": "1.13.2",
"setprototypeof": "1.1.0",
"statuses": "~1.4.0",
"type-is": "~1.6.16",
"utils-merge": "1.0.1",
"vary": "~1.1.2"
}
},
"finalhandler": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.1.tgz",
"integrity": "sha512-Y1GUDo39ez4aHAw7MysnUD5JzYX+WaIj8I57kO3aEPT1fFRL4sr7mjei97FgnwhAyyzRYmQZaTHb2+9uZ1dPtg==",
"requires": {
"debug": "2.6.9",
"encodeurl": "~1.0.2",
"escape-html": "~1.0.3",
"on-finished": "~2.3.0",
"parseurl": "~1.3.2",
"statuses": "~1.4.0",
"unpipe": "~1.0.0"
}
},
"forwarded": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz",
"integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ="
},
"fresh": {
"version": "0.5.2",
"resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz",
"integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac="
},
"http-errors": {
"version": "1.6.3",
"resolved": "http://registry.npmjs.org/http-errors/-/http-errors-1.6.3.tgz",
"integrity": "sha1-i1VoC7S+KDoLW/TqLjhYC+HZMg0=",
"requires": {
"depd": "~1.1.2",
"inherits": "2.0.3",
"setprototypeof": "1.1.0",
"statuses": ">= 1.4.0 < 2"
}
},
"iconv-lite": {
"version": "0.4.19",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.19.tgz",
"integrity": "sha512-oTZqweIP51xaGPI4uPa56/Pri/480R+mo7SeU+YETByQNhDG55ycFyNLIgta9vXhILrxXDmF7ZGhqZIcuN0gJQ=="
},
"inherits": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz",
"integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4="
},
"ipaddr.js": {
"version": "1.8.0",
"resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.8.0.tgz",
"integrity": "sha1-6qM9bd16zo9/b+DJygRA5wZzix4="
},
"media-typer": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
"integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g="
},
"merge-descriptors": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz",
"integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E="
},
"methods": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz",
"integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4="
},
"mime": {
"version": "1.4.1",
"resolved": "https://registry.npmjs.org/mime/-/mime-1.4.1.tgz",
"integrity": "sha512-KI1+qOZu5DcW6wayYHSzR/tXKCDC5Om4s1z2QJjDULzLcmf3DvzS7oluY4HCTrc+9FiKmWUgeNLg7W3uIQvxtQ=="
},
"mime-db": {
"version": "1.36.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.36.0.tgz",
"integrity": "sha512-L+xvyD9MkoYMXb1jAmzI/lWYAxAMCPvIBSWur0PZ5nOf5euahRLVqH//FKW9mWp2lkqUgYiXPgkzfMUFi4zVDw=="
},
"mime-types": {
"version": "2.1.20",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.20.tgz",
"integrity": "sha512-HrkrPaP9vGuWbLK1B1FfgAkbqNjIuy4eHlIYnFi7kamZyLLrGlo2mpcx0bBmNpKqBtYtAfGbodDddIgddSJC2A==",
"requires": {
"mime-db": "~1.36.0"
}
},
"ms": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
"integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g="
},
"negotiator": {
"version": "0.6.1",
"resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.1.tgz",
"integrity": "sha1-KzJxhOiZIQEXeyhWP7XnECrNDKk="
},
"on-finished": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz",
"integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=",
"requires": {
"ee-first": "1.1.1"
}
},
"parseurl": {
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.2.tgz",
"integrity": "sha1-/CidTtiZMRlGDBViUyYs3I3mW/M="
},
"path-to-regexp": {
"version": "0.1.7",
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
"integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w="
},
"proxy-addr": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.4.tgz",
"integrity": "sha512-5erio2h9jp5CHGwcybmxmVqHmnCBZeewlfJ0pex+UW7Qny7OOZXTtH56TGNyBizkgiOwhJtMKrVzDTeKcySZwA==",
"requires": {
"forwarded": "~0.1.2",
"ipaddr.js": "1.8.0"
}
},
"qs": {
"version": "6.5.1",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.5.1.tgz",
"integrity": "sha512-eRzhrN1WSINYCDCbrz796z37LOe3m5tmW7RQf6oBntukAG1nmovJvhnwHHRMAfeoItc1m2Hk02WER2aQ/iqs+A=="
},
"range-parser": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.0.tgz",
"integrity": "sha1-9JvmtIeJTdxA3MlKMi9hEJLgDV4="
},
"raw-body": {
"version": "2.3.2",
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.3.2.tgz",
"integrity": "sha1-vNYMd9Prk83gBQKVw/N5OJvIj4k=",
"requires": {
"bytes": "3.0.0",
"http-errors": "1.6.2",
"iconv-lite": "0.4.19",
"unpipe": "1.0.0"
},
"dependencies": {
"depd": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/depd/-/depd-1.1.1.tgz",
"integrity": "sha1-V4O04cRZ8G+lyif5kfPQbnoxA1k="
},
"http-errors": {
"version": "1.6.2",
"resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.6.2.tgz",
"integrity": "sha1-CgAsyFcHGSp+eUbO7cERVfYOxzY=",
"requires": {
"depd": "1.1.1",
"inherits": "2.0.3",
"setprototypeof": "1.0.3",
"statuses": ">= 1.3.1 < 2"
}
},
"setprototypeof": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.0.3.tgz",
"integrity": "sha1-ZlZ+NwQ+608E2RvWWMDL77VbjgQ="
}
}
},
"safe-buffer": {
"version": "5.1.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz",
"integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg=="
},
"send": {
"version": "0.16.2",
"resolved": "https://registry.npmjs.org/send/-/send-0.16.2.tgz",
"integrity": "sha512-E64YFPUssFHEFBvpbbjr44NCLtI1AohxQ8ZSiJjQLskAdKuriYEP6VyGEsRDH8ScozGpkaX1BGvhanqCwkcEZw==",
"requires": {
"debug": "2.6.9",
"depd": "~1.1.2",
"destroy": "~1.0.4",
"encodeurl": "~1.0.2",
"escape-html": "~1.0.3",
"etag": "~1.8.1",
"fresh": "0.5.2",
"http-errors": "~1.6.2",
"mime": "1.4.1",
"ms": "2.0.0",
"on-finished": "~2.3.0",
"range-parser": "~1.2.0",
"statuses": "~1.4.0"
}
},
"serve-static": {
"version": "1.13.2",
"resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.13.2.tgz",
"integrity": "sha512-p/tdJrO4U387R9oMjb1oj7qSMaMfmOyd4j9hOFoxZe2baQszgHcSWjuya/CiT5kgZZKRudHNOA0pYXOl8rQ5nw==",
"requires": {
"encodeurl": "~1.0.2",
"escape-html": "~1.0.3",
"parseurl": "~1.3.2",
"send": "0.16.2"
}
},
"setprototypeof": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.0.tgz",
"integrity": "sha512-BvE/TwpZX4FXExxOxZyRGQQv651MSwmWKZGqvmPcRIjDqWub67kTKuIMx43cZZrS/cBBzwBcNDWoFxt2XEFIpQ=="
},
"statuses": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-1.4.0.tgz",
"integrity": "sha512-zhSCtt8v2NDrRlPQpCNtw/heZLtfUDqxBM1udqikb/Hbk52LK4nQSwr10u77iopCW5LsyHpuXS0GnEc48mLeew=="
},
"type-is": {
"version": "1.6.16",
"resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.16.tgz",
"integrity": "sha512-HRkVv/5qY2G6I8iab9cI7v1bOIdhm94dVjQCPFElW9W+3GeDOSHmy2EBYe4VTApuzolPcmgFTN3ftVJRKR2J9Q==",
"requires": {
"media-typer": "0.3.0",
"mime-types": "~2.1.18"
}
},
"unpipe": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz",
"integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw="
},
"utils-merge": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz",
"integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM="
},
"vary": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz",
"integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw="
}
}
}

View File

@ -1,15 +0,0 @@
{
"name": "atom-test-update-server",
"version": "0.1.0",
"private": true,
"description": "A test update server that replicates the one on atom.io",
"main": "run-server.js",
"scripts": {
"start": "node run-server.js"
},
"author": "David Wilson",
"dependencies": {
"colors": "^1.3.2",
"express": "^4.16.3"
}
}

View File

@ -1,134 +0,0 @@
require('colors');
const fs = require('fs');
const path = require('path');
const express = require('express');
const app = express();
const port = process.env.PORT || 3456;
// Load the metadata for the local build of Pulsar
const buildPath = path.resolve(__dirname, '..', '..', 'out');
const packageJsonPath = path.join(buildPath, 'app', 'package.json');
if (!fs.existsSync(buildPath) || !fs.existsSync(packageJsonPath)) {
console.log(
`This script requires a full Pulsar build with release packages for the current platform in the following path:\n ${buildPath}\n`
);
if (process.platform === 'darwin') {
console.log(
`Run this command before trying again:\n script/build --compress-artifacts --test-sign\n\n`
);
} else if (process.platform === 'win32') {
console.log(
`Run this command before trying again:\n script/build --create-windows-installer\n\n`
);
}
process.exit(1);
}
const appMetadata = require(packageJsonPath);
const versionMatch = appMetadata.version.match(/-(beta|nightly)\d+$/);
const releaseChannel = versionMatch ? versionMatch[1] : 'stable';
console.log(
`Serving ${
appMetadata.productName
} release assets (channel = ${releaseChannel})\n`.green
);
function getMacZip(req, res) {
console.log(`Received request for pulsar-mac.zip, sending it`);
res.sendFile(path.join(buildPath, 'pulsar-mac.zip'));
}
function getMacUpdates(req, res) {
if (req.query.version !== appMetadata.version) {
const updateInfo = {
name: appMetadata.version,
pub_date: new Date().toISOString(),
url: `http://localhost:${port}/mac/pulsar-mac.zip`,
notes: '<p>No Details</p>'
};
console.log(
`Received request for macOS updates (version = ${
req.query.version
}), sending\n`,
updateInfo
);
res.json(updateInfo);
} else {
console.log(
`Received request for macOS updates, sending 204 as Pulsar is up to date (version = ${
req.query.version
})`
);
res.sendStatus(204);
}
}
function getReleasesFile(fileName) {
return function(req, res) {
console.log(
`Received request for ${fileName}, version: ${req.query.version}`
);
if (req.query.version) {
const versionMatch = (req.query.version || '').match(
/-(beta|nightly)\d+$/
);
const versionChannel = (versionMatch && versionMatch[1]) || 'stable';
if (releaseChannel !== versionChannel) {
console.log(
`Pulsar requested an update for version ${
req.query.version
} but the current release channel is ${releaseChannel}`
);
res.sendStatus(404);
return;
}
}
res.sendFile(path.join(buildPath, fileName));
};
}
function getNupkgFile(is64bit) {
return function(req, res) {
let nupkgFile = req.params.nupkg;
if (is64bit) {
const nupkgMatch = nupkgFile.match(/pulsar-(.+)-(delta|full)\.nupkg/);
if (nupkgMatch) {
nupkgFile = `pulsar-x64-${nupkgMatch[1]}-${nupkgMatch[2]}.nupkg`;
}
}
console.log(
`Received request for ${req.params.nupkg}, sending ${nupkgFile}`
);
res.sendFile(path.join(buildPath, nupkgFile));
};
}
if (process.platform === 'darwin') {
app.get('/mac/pulsar-mac.zip', getMacZip);
app.get('/api/updates', getMacUpdates);
} else if (process.platform === 'win32') {
app.get('/api/updates/RELEASES', getReleasesFile('RELEASES'));
app.get('/api/updates/:nupkg', getNupkgFile());
app.get('/api/updates-x64/RELEASES', getReleasesFile('RELEASES-x64'));
app.get('/api/updates-x64/:nupkg', getNupkgFile(true));
} else {
console.log(
`The current platform '${
process.platform
}' doesn't support Squirrel updates, exiting.`.red
);
process.exit(1);
}
app.listen(port, () => {
console.log(
`Run Pulsar with ATOM_UPDATE_URL_PREFIX="http://localhost:${port}" set to test updates!\n`
.yellow
);
});

View File

@ -1,5 +0,0 @@
target=v12.18.3
node-version=12.18.3
package-lock=true
prefer-frozen-lockfile=true
strict-peer-dependencies=false

View File

@ -1,65 +0,0 @@
# Atom Release Build Documentation
## Overview
This folder contains build configuration and scripts for automating Atom's
release pipeline using [Visual Studio Team Services](https://azure.microsoft.com/en-us/services/visual-studio-team-services/).
VSTS allows us to leverage [multi-phase jobs](https://github.com/Microsoft/vsts-agent/blob/master/docs/preview/yamlgettingstarted-jobs.md) to generate Atom installation packages
on Windows, macOS, and Linux and then publish a new release automatically once
the build completes successfully.
## Nightly Release Build
Our scheduled nightly release uses a multi-phase job to automatically generate Atom
Nightly installation packages and then publish them to GitHub and atom.io.
The [Atom Nightly build definition](https://github.visualstudio.com/Atom/_build/index?context=mine&path=%5C&definitionId=1&_a=completed)
is configured with the [`nightly-release.yml`](nightly-release.yml) file. More
information on VSTS' YAML configuration format can be found in their [Getting Started](https://github.com/Microsoft/vsts-agent/blob/master/docs/preview/yamlgettingstarted.md)
documentation.
### Versioning Phase
In this phase, we run [`script/vsts/get-release-version.js`](get-release-version.js) to
determine the version of the next Atom Nightly release. This script consults the
GitHub v3 API to get the list of releases on the [`atom/atom-nightly-releases`](https://github.com/atom/atom-nightly-releases)
repo. We look for the most recent, non-draft release and then parse its version
number (e.g. `1.30.0-nightly4`) to extract the base version and the monotonically-increasing
nightly release number.
Once we have the version and release number, we compare the base version number
(`1.30.0`) against the one in `package.json` of the latest commit in the local
repo. If those versions are the same, we increment the release number (`1.30.0-nightly5`).
If those versions are different, we use `0` for the release number to start a
new series of Nightly releases for the new version (`1.31.0-nightly0`).
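As a rough sketch (not the actual build script), the selection logic described above boils down to the following; `baseVersion` is read from `package.json` and `latestReleaseTag` comes from the latest non-draft release returned by the GitHub API:
```
// Sketch of the nightly version selection described above.
function nextNightlyVersion(baseVersion, latestReleaseTag) {
  const match = latestReleaseTag.match(/^v?(\d+\.\d+\.\d+)-nightly(\d+)$/);
  if (match && match[1] === baseVersion) {
    // Same base version: continue the series (1.30.0-nightly4 -> 1.30.0-nightly5)
    return `${baseVersion}-nightly${parseInt(match[2], 10) + 1}`;
  }
  // Base version changed in package.json: start a new series (1.31.0-nightly0)
  return `${baseVersion}-nightly0`;
}

console.log(nextNightlyVersion('1.30.0', 'v1.30.0-nightly4')); // 1.30.0-nightly5
console.log(nextNightlyVersion('1.31.0', 'v1.30.0-nightly4')); // 1.31.0-nightly0
```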
Once the release version has been determined, it is set as our custom `ReleaseVersion`
[output variable](https://github.com/Microsoft/vsts-agent/blob/master/docs/preview/yamlgettingstarted-outputvariables.md)
by writing out a special string to `stdout` which is recognized by VSTS. This
variable will be used in later build steps.
If any part of the build process fails from this point forward, the same version
number *should* be chosen in the next build unless the base version number has
been changed in `master`.
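Concretely, the script just prints a line in VSTS' logging-command format (the same string appears in the version script later in this commit); a minimal reproduction:
```
// VSTS scans stdout for ##vso[...] logging commands; `isOutput=true` makes the
// variable visible to later jobs and phases. The value here is illustrative.
const releaseVersion = '1.31.0-nightly0';
console.log(
  `##vso[task.setvariable variable=ReleaseVersion;isOutput=true]${releaseVersion}`
);
```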
### OS-specific Build Phases
In this part of the build, we use [phase templates](https://github.com/Microsoft/vsts-agent/blob/master/docs/preview/yamlgettingstarted-templates.md)
for [Windows](windows.yml), [macOS](macos.yml), and [Linux](linux.yml) to build
Atom simultaneously across those platforms and then run the Atom test suite to
verify the builds. If build, test, and linting come back clean, we take the build
assets generated in the `out` folder on each OS and then stage them as build artifacts.
For each OS build, we refer to the `ReleaseVersion` variable, set in the previous
phase, to configure the `ATOM_RELEASE_VERSION` environment variable to override
the version contained in Atom's `package.json`.
### Publish Phase
If all three OS builds have completed successfully, the publish phase will launch the
[`script/publish-release`](../publish-release) script to collect the release
artifacts created from those builds and then upload them to the S3 bucket from
which Atom release assets are served. If the upload process is successful, a new
release will be created on the `atom/atom-nightly-releases` repo using the
`ReleaseVersion` with a `v` prefix as the tag name. The release assets will also
be uploaded to the GitHub release at this time.
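A trimmed-down sketch of the release-creation step, condensed from the `publish-release` call in `script/vsts/upload-artifacts.js` later in this commit (the values shown are illustrative; the real script derives them from CI variables and config):
```
// Create the GitHub release for a nightly build (condensed sketch).
const publishRelease = require('publish-release');

const releaseVersion = '1.31.0-nightly0';
publishRelease(
  {
    token: process.env.GITHUB_TOKEN,
    owner: 'atom',
    repo: 'atom-nightly-releases',
    name: releaseVersion,
    tag: `v${releaseVersion}`, // ReleaseVersion with a `v` prefix, as described above
    notes: 'Generated release notes go here',
    draft: false,              // nightly releases are published immediately
    prerelease: true,
    assets: ['out/atom-mac.zip'] // release artifacts collected from the OS builds
  },
  (err, release) => {
    if (err) throw err;
    console.log('Created release:', release.html_url);
  }
);
```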

View File

@ -1,100 +0,0 @@
const path = require('path');
const request = require('request-promise-native');
const repositoryRootPath = path.resolve(__dirname, '..', '..');
const appMetadata = require(path.join(repositoryRootPath, 'package.json'));
const { REPO_OWNER, NIGHTLY_RELEASE_REPO } = require('../config');
const yargs = require('yargs');
const argv = yargs
.usage('Usage: $0 [options]')
.help('help')
.describe('nightly', 'Indicates that a nightly version should be produced')
.wrap(yargs.terminalWidth()).argv;
function getAppName(version) {
const match = version.match(/\d+\.\d+\.\d+(-([a-z]+)(\d+|-\w{4,})?)?$/);
if (!match) {
throw new Error(`Found incorrectly formatted Pulsar version ${version}`);
} else if (match[2]) {
return `pulsar-${match[2]}`;
}
return 'pulsar';
}
async function getReleaseVersion() {
let releaseVersion = process.env.ATOM_RELEASE_VERSION || appMetadata.version;
if (argv.nightly) {
const releases = await request({
url: `https://api.github.com/repos/${REPO_OWNER}/${NIGHTLY_RELEASE_REPO}/releases`,
headers: {
Accept: 'application/vnd.github.v3+json',
'User-Agent': 'Pulsar Release Build'
},
json: true
});
let releaseNumber = 0;
const baseVersion = appMetadata.version.split('-')[0];
if (releases && releases.length > 0) {
const latestRelease = releases.find(r => !r.draft);
const versionMatch = latestRelease.tag_name.match(
/^v?(\d+\.\d+\.\d+)-nightly(\d+)$/
);
if (versionMatch && versionMatch[1] === baseVersion) {
releaseNumber = parseInt(versionMatch[2]) + 1;
}
}
releaseVersion = `${baseVersion}-nightly${releaseNumber}`;
}
// Set our ReleaseVersion build variable and update VSTS' build number to
// include the version. Writing these strings to stdout causes VSTS to set
// the associated variables.
console.log(
`##vso[task.setvariable variable=ReleaseVersion;isOutput=true]${releaseVersion}`
);
if (!process.env.SYSTEM_PULLREQUEST_PULLREQUESTNUMBER) {
// Only set the build number on non-PR builds as it causes build errors when
// non-admins send PRs to the repo
console.log(
`##vso[build.updatebuildnumber]${releaseVersion}+${
process.env.BUILD_BUILDID
}`
);
}
// Write out some variables that indicate whether artifacts should be uploaded
const buildBranch = process.env.BUILD_SOURCEBRANCHNAME;
const isReleaseBranch =
process.env.IS_RELEASE_BRANCH ||
argv.nightly ||
buildBranch.match(/\d\.\d+-releases/) !== null;
const isSignedZipBranch =
!isReleaseBranch &&
(process.env.IS_SIGNED_ZIP_BRANCH ||
buildBranch.startsWith('electron-') ||
(buildBranch === 'master' &&
!process.env.SYSTEM_PULLREQUEST_PULLREQUESTNUMBER));
const SHOULD_SIGN = process.env.SHOULD_SIGN;
console.log(
`##vso[task.setvariable variable=AppName;isOutput=true]${getAppName(
releaseVersion
)}`
);
console.log(
`##vso[task.setvariable variable=IsReleaseBranch;isOutput=true]${isReleaseBranch}`
);
console.log(
`##vso[task.setvariable variable=IsSignedZipBranch;isOutput=true]${isSignedZipBranch}`
);
console.log(
`##vso[task.setvariable variable=SHOULD_SIGN;isOutput=true]${SHOULD_SIGN}`
);
}
getReleaseVersion();

View File

@ -1,197 +0,0 @@
const semver = require('semver');
const octokit = require('@octokit/rest')();
const changelog = require('pr-changelog');
const childProcess = require('child_process');
const { REPO_OWNER, MAIN_REPO, NIGHTLY_RELEASE_REPO } = require('../../config');
module.exports.getRelease = async function(releaseVersion, githubToken) {
if (githubToken) {
octokit.authenticate({
type: 'token',
token: githubToken
});
}
const releases = await octokit.repos.getReleases({
owner: REPO_OWNER,
repo: MAIN_REPO
});
const release = releases.data.find(r => semver.eq(r.name, releaseVersion));
return {
exists: release !== undefined,
isDraft: release && release.draft,
releaseNotes: release ? release.body : undefined
};
};
module.exports.generateForVersion = async function(
releaseVersion,
githubToken,
oldReleaseNotes
) {
let oldVersion = null;
let oldVersionName = null;
const parsedVersion = semver.parse(releaseVersion);
const newVersionBranch = getBranchForVersion(parsedVersion);
if (githubToken) {
changelog.setGithubAccessToken(githubToken);
octokit.authenticate({
type: 'token',
token: githubToken
});
}
if (parsedVersion.prerelease && parsedVersion.prerelease[0] === 'beta0') {
// For beta0 releases, stable hasn't been released yet so compare against
// the stable version's release branch
oldVersion = `${parsedVersion.major}.${parsedVersion.minor - 1}-releases`;
oldVersionName = `v${parsedVersion.major}.${parsedVersion.minor - 1}.0`;
} else {
let releases = await octokit.repos.getReleases({
owner: REPO_OWNER,
repo: MAIN_REPO
});
oldVersion = 'v' + getPreviousRelease(releaseVersion, releases.data).name;
oldVersionName = oldVersion;
}
const allChangesText = await changelog.getChangelog({
owner: REPO_OWNER,
repo: MAIN_REPO,
fromTag: oldVersion,
toTag: newVersionBranch,
dependencyKey: 'packageDependencies',
changelogFormatter: function({
pullRequests,
owner,
repo,
fromTag,
toTag
}) {
let prString = changelog.pullRequestsToString(pullRequests);
let title = repo;
if (repo === MAIN_REPO) {
title = 'Pulsar Core';
fromTag = oldVersionName;
toTag = releaseVersion;
}
return `### [${title}](https://github.com/${owner}/${repo})\n\n${fromTag}...${toTag}\n\n${prString}`;
}
});
const writtenReleaseNotes =
extractWrittenReleaseNotes(oldReleaseNotes) ||
'**TODO**: Pull relevant changes here!';
return `## Notable Changes\n
${writtenReleaseNotes}\n
<details>
<summary>All Changes</summary>\n
${allChangesText}
</details>
`;
};
module.exports.generateForNightly = async function(
releaseVersion,
githubToken
) {
const latestCommitResult = childProcess.spawnSync('git', [
'rev-parse',
'--short',
'HEAD'
]);
if (!latestCommitResult) {
console.log("Couldn't get the current commmit from git.");
return undefined;
}
const latestCommit = latestCommitResult.stdout.toString().trim();
const output = [
`### This nightly release is based on https://github.com/${REPO_OWNER}/${MAIN_REPO}/commit/${latestCommit} :atom: :night_with_stars:`
];
try {
const releases = await octokit.repos.getReleases({
owner: REPO_OWNER,
repo: NIGHTLY_RELEASE_REPO
});
const previousRelease = getPreviousRelease(releaseVersion, releases.data);
const oldReleaseNotes = previousRelease ? previousRelease.body : undefined;
if (oldReleaseNotes) {
const extractMatch = oldReleaseNotes.match(
/pulsar-edit\/pulsar\/commit\/([0-9a-f]{5,40})/
);
if (extractMatch && extractMatch[1]) {
output.push('', '---', '');
const previousCommit = extractMatch[1];
if (
previousCommit === latestCommit ||
previousCommit.startsWith(latestCommit) ||
latestCommit.startsWith(previousCommit)
) {
// TODO: Maybe we can bail out and not publish a release if it contains no commits?
output.push('No changes have been included in this release');
} else {
output.push(
`Click [here](https://github.com/${REPO_OWNER}/${MAIN_REPO}/compare/${previousCommit}...${latestCommit}) to see the changes included with this release!`
);
}
}
}
} catch (e) {
console.log(
'Error when trying to find the previous nightly release: ' + e.message
);
}
return output.join('\n');
};
function extractWrittenReleaseNotes(oldReleaseNotes) {
if (oldReleaseNotes) {
const extractMatch = oldReleaseNotes.match(
/^## Notable Changes\r\n([\s\S]*)<details>/
);
if (extractMatch && extractMatch[1]) {
return extractMatch[1].trim();
}
}
return undefined;
}
function getPreviousRelease(version, allReleases) {
const versionIsStable = semver.prerelease(version) === null;
// Make sure versions are sorted before using them
allReleases.sort((v1, v2) => semver.rcompare(v1.name, v2.name));
for (let release of allReleases) {
if (versionIsStable && semver.prerelease(release.name)) {
continue;
}
if (semver.lt(release.name, version)) {
return release;
}
}
return null;
}
function getBranchForVersion(version) {
let parsedVersion = version;
if (!(version instanceof semver.SemVer)) {
parsedVersion = semver.parse(version);
}
return `${parsedVersion.major}.${parsedVersion.minor}-releases`;
}

View File

@ -1,134 +0,0 @@
const fs = require('fs');
const path = require('path');
const request = require('request-promise-native');
module.exports = async function(packageRepoName, apiToken, version, artifacts) {
for (let artifact of artifacts) {
let fileExt = path.extname(artifact);
switch (fileExt) {
case '.deb':
await uploadDebPackage(version, artifact);
break;
case '.rpm':
await uploadRpmPackage(version, artifact);
break;
default:
continue;
}
}
async function uploadDebPackage(version, filePath) {
// NOTE: Not sure if distro IDs update over time, might need
// to query the following endpoint dynamically to find the right IDs:
//
// https://{apiToken}:@packagecloud.io/api/v1/distributions.json
await uploadPackage({
version,
filePath,
type: 'deb',
arch: 'amd64',
fileName: 'atom-amd64.deb',
distroId: 35 /* Any .deb distribution */,
distroName: 'any',
distroVersion: 'any'
});
}
async function uploadRpmPackage(version, filePath) {
await uploadPackage({
version,
filePath,
type: 'rpm',
arch: 'x86_64',
fileName: 'atom.x86_64.rpm',
distroId: 140 /* Enterprise Linux 7 */,
distroName: 'el',
distroVersion: '7'
});
}
async function uploadPackage(packageDetails) {
// Infer the package suffix from the version
if (/-beta\d+/.test(packageDetails.version)) {
packageDetails.releaseSuffix = '-beta';
} else if (/-nightly\d+/.test(packageDetails.version)) {
packageDetails.releaseSuffix = '-nightly';
}
await removePackageIfExists(packageDetails);
await uploadToPackageCloud(packageDetails);
}
function uploadToPackageCloud(packageDetails) {
return new Promise(async (resolve, reject) => {
console.log(
`Uploading ${
packageDetails.fileName
} to https://packagecloud.io/AtomEditor/${packageRepoName}`
);
var uploadOptions = {
url: `https://${apiToken}:@packagecloud.io/api/v1/repos/AtomEditor/${packageRepoName}/packages.json`,
formData: {
'package[distro_version_id]': packageDetails.distroId,
'package[package_file]': fs.createReadStream(packageDetails.filePath)
}
};
request.post(uploadOptions, (error, uploadResponse, body) => {
if (error || uploadResponse.statusCode !== 201) {
console.log(
`Error while uploading '${packageDetails.fileName}' v${
packageDetails.version
}: ${uploadResponse}`
);
reject(uploadResponse);
} else {
console.log(`Successfully uploaded ${packageDetails.fileName}!`);
resolve(uploadResponse);
}
});
});
}
async function removePackageIfExists({
version,
type,
arch,
fileName,
distroName,
distroVersion,
releaseSuffix
}) {
// RPM URI paths have an extra '/0.1' thrown in
let versionJsonPath =
type === 'rpm' ? `${version.replace('-', '.')}/0.1` : version;
try {
const existingPackageDetails = await request({
uri: `https://${apiToken}:@packagecloud.io/api/v1/repos/AtomEditor/${packageRepoName}/package/${type}/${distroName}/${distroVersion}/atom${releaseSuffix ||
''}/${arch}/${versionJsonPath}.json`,
method: 'get',
json: true
});
if (existingPackageDetails && existingPackageDetails.destroy_url) {
console.log(
`Deleting pre-existing package ${fileName} in ${packageRepoName}`
);
await request({
uri: `https://${apiToken}:@packagecloud.io/${
existingPackageDetails.destroy_url
}`,
method: 'delete'
});
}
} catch (err) {
if (err.statusCode !== 404) {
console.log(
`Error while checking for existing '${fileName}' v${version}:\n\n`,
err
);
}
}
}
};

View File

@ -1,51 +0,0 @@
'use strict';
const path = require('path');
const { BlobServiceClient } = require('@azure/storage-blob');
module.exports = function upload(connStr, directory, assets) {
const blobServiceClient = BlobServiceClient.fromConnectionString(connStr);
const containerName = 'atom-build';
const containerClient = blobServiceClient.getContainerClient(containerName);
async function listExistingAssetsForDirectory() {
return containerClient.listBlobsFlat({ prefix: directory });
}
async function deleteExistingAssets(existingAssets = []) {
try {
for await (const asset of existingAssets) {
console.log(`Deleting blob ${asset.name}`);
containerClient.deleteBlob(asset.name);
}
return Promise.resolve(true);
} catch (ex) {
return Promise.reject(ex.message);
}
}
function uploadAssets(assets) {
return assets.reduce(function(promise, asset) {
return promise.then(() => uploadAsset(asset));
}, Promise.resolve());
}
function uploadAsset(assetPath) {
return new Promise(async (resolve, reject) => {
try {
console.info(`Uploading ${assetPath}`);
const blockBlobClient = containerClient.getBlockBlobClient(
path.join(directory, path.basename(assetPath))
);
const result = await blockBlobClient.uploadFile(assetPath);
resolve(result);
} catch (ex) {
reject(ex.message);
}
});
}
return listExistingAssetsForDirectory()
.then(deleteExistingAssets)
.then(() => uploadAssets(assets));
};

View File

@ -1,12 +0,0 @@
jobs:
- job: Lint
timeoutInMinutes: 10
pool:
vmImage: ubuntu-latest
steps:
- script: |
cd script
npm ci
displayName: Install script dependencies
- script: node ./script/lint.js
displayName: Run linter

View File

@ -1,79 +0,0 @@
# workaround for https://bit.ly/2CK8itc
variables:
_ATOM_RELEASES_S3_KEY: $[ variables.ATOM_RELEASES_S3_KEY ]
_ATOM_RELEASES_S3_SECRET: $[ variables.ATOM_RELEASES_S3_SECRET ]
_ATOM_RELEASES_S3_BUCKET: $[ variables.ATOM_RELEASES_S3_BUCKET ]
_PACKAGE_CLOUD_API_KEY: $[ variables.PACKAGE_CLOUD_API_KEY ]
jobs:
# GetReleaseVersion for nightly release
- template: platforms/templates/get-release-version.yml
parameters:
NightlyFlag: --nightly
# Import lint definition
- template: lint.yml
# Import OS-specific build definitions
- template: platforms/windows.yml
- template: platforms/macos.yml
- template: platforms/linux.yml
- job: Release
pool:
vmImage: 'ubuntu-latest'
dependsOn:
- GetReleaseVersion
- Lint
- Windows_tests
- Linux
- macOS_tests
variables:
ReleaseVersion: $[ dependencies.GetReleaseVersion.outputs['Version.ReleaseVersion'] ]
IsNightlyBranch: true
steps:
- template: platforms/templates/preparation.yml
- task: DownloadBuildArtifacts@0
inputs:
itemPattern: '**'
downloadType: 'specific'
displayName: Download Release Artifacts
- script: |
node $(Build.SourcesDirectory)/script/vsts/upload-artifacts.js --create-github-release --assets-path "$(System.ArtifactsDirectory)" --linux-repo-name "atom"
env:
GITHUB_TOKEN: $(GITHUB_TOKEN)
ATOM_RELEASE_VERSION: $(ReleaseVersion)
ATOM_RELEASES_S3_KEY: $(ATOM_RELEASES_S3_KEY)
ATOM_RELEASES_AZURE_CONN_STRING: $(ATOM_RELEASES_AZURE_CONN_STRING)
ATOM_RELEASES_S3_SECRET: $(ATOM_RELEASES_S3_SECRET)
ATOM_RELEASES_S3_BUCKET: $(ATOM_RELEASES_S3_BUCKET)
PACKAGE_CLOUD_API_KEY: $(PACKAGE_CLOUD_API_KEY)
displayName: Create Nightly Release
- job: bump_dependencies
displayName: Bump Dependencies
timeoutInMinutes: 180
pool:
vmImage: macos-10.15
steps:
- template: platforms/templates/preparation.yml
- template: platforms/templates/bootstrap.yml
- script: |
cd script/lib
npm install
displayName: npm install
- script: |
cd script/lib/update-dependency
node index.js
displayName: Bump dependencies
env:
AUTH_TOKEN: $(GITHUB_TOKEN)

File diff suppressed because it is too large

View File

@ -1,16 +0,0 @@
{
"name": "atom-release-scripts",
"description": "Atom release scripts",
"dependencies": {
"@azure/storage-blob": "^12.5.0",
"@octokit/rest": "^15.9.5",
"download": "^7.1.0",
"glob": "7.0.3",
"pr-changelog": "^0.3.4",
"publish-release": "^1.6.0",
"request": "^2.87.0",
"request-promise-native": "^1.0.5",
"semver": "5.3.0",
"yargs": "4.8.1"
}
}

View File

@ -1,34 +0,0 @@
jobs:
- job: Linux
dependsOn: GetReleaseVersion
timeoutInMinutes: 180
variables:
ReleaseVersion: $[ dependencies.GetReleaseVersion.outputs['Version.ReleaseVersion'] ]
pool:
vmImage: ubuntu-18.04
steps:
- template: templates/preparation.yml
- template: templates/cache.yml
parameters:
OS: linux
- template: templates/bootstrap.yml
- template: templates/build.yml
- template: templates/test.yml
- template: templates/publish.yml
parameters:
artifacts:
- fileName: atom.x86_64.rpm
fileDir: $(Build.SourcesDirectory)/out
condition: and(succeeded(), ne(variables['Build.Reason'], 'PullRequest'))
- fileName: atom-amd64.deb
fileDir: $(Build.SourcesDirectory)/out
condition: and(succeeded(), ne(variables['Build.Reason'], 'PullRequest'))
- fileName: atom-amd64.tar.gz
fileDir: $(Build.SourcesDirectory)/out
condition: and(succeeded(), ne(variables['Build.Reason'], 'PullRequest'))

View File

@ -1,82 +0,0 @@
jobs:
- job: macOS_build
displayName: macOS Build
dependsOn: GetReleaseVersion
timeoutInMinutes: 180
variables:
ReleaseVersion: $[ dependencies.GetReleaseVersion.outputs['Version.ReleaseVersion'] ]
IsReleaseBranch: $[ dependencies.GetReleaseVersion.outputs['Version.IsReleaseBranch'] ]
IsSignedZipBranch: $[ dependencies.GetReleaseVersion.outputs['Version.IsSignedZipBranch'] ]
RunCoreMainTests: true
pool:
vmImage: macos-10.15
steps:
- template: templates/preparation.yml
- template: templates/cache.yml
parameters:
OS: macos
- template: templates/bootstrap.yml
- template: templates/build.yml
# core main tests
- template: templates/test.yml
- script: |
cp $(Build.SourcesDirectory)/out/*.zip $(Build.ArtifactStagingDirectory)
displayName: Stage Artifacts
- template: templates/publish.yml
parameters:
artifacts:
- fileName: atom-mac.zip
fileDir: $(Build.ArtifactStagingDirectory)
condition: succeeded()
- fileName: atom-mac-symbols.zip
fileDir: $(Build.ArtifactStagingDirectory)
condition: succeeded()
- fileName: atom-api.json
fileDir: $(Build.SourcesDirectory)/docs/output
condition: succeeded()
- job: macOS_tests
displayName: macOS Tests
dependsOn: macOS_build
timeoutInMinutes: 180
pool:
vmImage: macos-10.15
strategy:
maxParallel: 3
matrix:
renderer:
RunCoreRendererTests: true
RunPackageTests: false
packages-1:
RunCoreTests: false
RunPackageTests: 1
packages-2:
RunCoreTests: false
RunPackageTests: 2
steps:
- template: templates/preparation.yml
- template: templates/cache.yml
parameters:
OS: macos
# The artifact caching task does not work on forks, so we need to
# bootstrap again for pull requests coming from forked repositories.
- template: templates/bootstrap.yml
- template: templates/download-unzip.yml
parameters:
artifacts:
- atom-mac.zip
- atom-mac-symbols.zip
- template: templates/test.yml

View File

@ -1,26 +0,0 @@
steps:
- pwsh: |
# OS specific env variables
if ($env:AGENT_OS -eq "Windows_NT") {
$env:NPM_BIN_PATH="C:/npm/prefix/npm.cmd"
$env:npm_config_build_from_source="true"
}
if ($env:AGENT_OS -eq "Darwin") {
$env:NPM_BIN_PATH="/usr/local/bin/npm"
$env:npm_config_build_from_source="true"
}
if ($env:AGENT_OS -eq "Linux") {
$env:NPM_BIN_PATH="/usr/local/bin/npm"
$env:CC="clang"
$env:CXX="clang++"
$env:npm_config_clang=1
}
# Bootstrap
node ./script/bootstrap.js
displayName: Bootstrap build environment
env:
GITHUB_TOKEN: $(GITHUB_TOKEN)
CI: true
CI_PROVIDER: VSTS
condition: or(ne(variables['MainNodeModulesRestored'], 'true'), ne(variables['ScriptRunnerNodeModulesRestored'], true), ne(variables['ScriptNodeModulesRestored'], 'true'), ne(variables['ApmNodeModulesRestored'], 'true'), ne(variables['LocalPackagesRestored'], 'true'))

View File

@ -1,52 +0,0 @@
steps:
- pwsh: |
# OS specific env variables
if ($env:AGENT_OS -eq "Windows_NT") {
$env:SQUIRREL_TEMP="C:/tmp"
$env:npm_config_build_from_source="true"
}
elseif ($env:AGENT_OS -eq "Linux") {
$env:CC="clang"
$env:CXX="clang++"
$env:npm_config_clang=1
$env:LinuxArgs="--create-debian-package --create-rpm-package"
$env:SHOULD_SIGN="false"
}
# Build Arguments
## Creation of Windows Installer
if ($env:AGENT_OS -eq "Windows_NT") {
mkdir -f -p $env:SQUIRREL_TEMP
if ($env:IS_RELEASE_BRANCH -eq "true") {
$CreateWindowsInstaller="--create-windows-installer"
}
}
## Code Sign
if ( ($env:SHOULD_SIGN -eq "true") -and (($env:IS_RELEASE_BRANCH -eq "true") -or ($env:IS_SIGNED_ZIP_BRANCH -eq "true")) ) {
$CodeSign="--code-sign"
}
# Build
$esc = '--%'
node ./script/build.js --no-bootstrap --compress-artifacts $esc $env:LinuxArgs $CodeSign $CreateWindowsInstaller
displayName: Build Atom
env:
GITHUB_TOKEN: $(GITHUB_TOKEN)
IS_RELEASE_BRANCH: $(IsReleaseBranch)
IS_SIGNED_ZIP_BRANCH: $(IsSignedZipBranch)
ATOM_RELEASE_VERSION: $(ReleaseVersion)
ATOM_MAC_CODE_SIGNING_CERT_DOWNLOAD_URL: $(ATOM_MAC_CODE_SIGNING_CERT_DOWNLOAD_URL)
ATOM_MAC_CODE_SIGNING_CERT_PASSWORD: $(ATOM_MAC_CODE_SIGNING_CERT_PASSWORD)
ATOM_MAC_CODE_SIGNING_KEYCHAIN: $(ATOM_MAC_CODE_SIGNING_KEYCHAIN)
ATOM_MAC_CODE_SIGNING_KEYCHAIN_PASSWORD: $(ATOM_MAC_CODE_SIGNING_KEYCHAIN_PASSWORD)
AC_USER: $(AC_USER)
AC_PASSWORD: $(AC_PASSWORD)
ATOM_WIN_CODE_SIGNING_CERT_DOWNLOAD_URL: $(ATOM_WIN_CODE_SIGNING_CERT_DOWNLOAD_URL)
ATOM_WIN_CODE_SIGNING_CERT_PASSWORD: $(ATOM_WIN_CODE_SIGNING_CERT_PASSWORD)
- script: |
sudo chown root ./out/atom*-amd64/chrome-sandbox
sudo chmod 4755 ./out/atom*-amd64/chrome-sandbox
displayName: Tweaking chrome-sandbox binary
condition: eq(variables['Agent.OS'], 'Linux')

View File

@ -1,44 +0,0 @@
parameters:
- name: OS
displayName: Operating System
type: string
values:
- windows
- linux
- macos
steps:
- task: Cache@2
displayName: Cache node_modules
inputs:
key: 'npm_main | "$(Agent.OS)" | "$(BUILD_ARCH)" | packages/**, !packages/**/node_modules/** | package.json, package-lock.json, apm/package.json, script/package.json, script/package-lock.json, script/script-runner/package.json, script/script-runner/package-lock.json, script/vsts/platforms/${{ parameters.OS }}.yml, script/vsts/platforms/templates/preparation.yml'
path: 'node_modules'
cacheHitVar: MainNodeModulesRestored
- task: Cache@2
displayName: Cache script/script-runner/node_modules
inputs:
key: 'npm_script_runner | "$(Agent.OS)" | "$(BUILD_ARCH)" | package.json, package-lock.json, apm/package.json, script/package.json, script/package-lock.json, script/script-runner/package.json, script/script-runner/package-lock.json, script/vsts/platforms/${{ parameters.OS }}.yml, script/vsts/platforms/templates/preparation.yml'
path: 'script/script-runner/node_modules'
cacheHitVar: ScriptRunnerNodeModulesRestored
- task: Cache@2
displayName: Cache script/node_modules
inputs:
key: 'npm_script | "$(Agent.OS)" | "$(BUILD_ARCH)" | packages/**, !packages/**/node_modules/** | package.json, package-lock.json, apm/package.json, script/package.json, script/package-lock.json, script/script-runner/package.json, script/script-runner/package-lock.json, script/vsts/platforms/${{ parameters.OS }}.yml, script/vsts/platforms/templates/preparation.yml'
path: 'script/node_modules'
cacheHitVar: ScriptNodeModulesRestored
- task: Cache@2
displayName: Cache apm/node_modules
inputs:
key: 'npm_apm | "$(Agent.OS)" | "$(BUILD_ARCH)" | packages/**, !packages/**/node_modules/** | package.json, package-lock.json, apm/package.json, script/package.json, script/package-lock.json, script/script-runner/package.json, script/script-runner/package-lock.json, script/vsts/platforms/${{ parameters.OS }}.yml, script/vsts/platforms/templates/preparation.yml'
path: 'apm/node_modules'
cacheHitVar: ApmNodeModulesRestored
- task: Cache@2
displayName: Cache packages/
inputs:
key: 'npm_local_packages | "$(Agent.OS)" | "$(BUILD_ARCH)" | packages/**, !packages/**/node_modules/** | package.json, package-lock.json, apm/package.json, script/package.json, script/package-lock.json, script/vsts/platforms/${{ parameters.OS }}.yml, script/vsts/platforms/templates/preparation.yml'
path: 'packages'
cacheHitVar: LocalPackagesRestored

View File

@ -1,18 +0,0 @@
parameters:
- name: artifacts
type: object
default: {}
- name: downloadPath
type: string
default: $(Build.SourcesDirectory)
steps:
- ${{ each artifact in parameters.artifacts }}:
- task: DownloadBuildArtifacts@0
displayName: Download ${{artifact}}
inputs:
artifactName: ${{artifact}}
downloadPath: ${{parameters.downloadPath}}
- script: unzip ${{artifact}}/${{artifact}} -d out
displayName: Unzip ${{artifact}}

View File

@ -1,20 +0,0 @@
parameters:
- name: NightlyFlag
type: string
values:
- ' '
- --nightly
default: ' '
jobs:
- job: GetReleaseVersion
displayName: Get Release Version
pool:
vmImage: 'ubuntu-latest'
steps:
- script: |
cd script/vsts
npm install
node get-release-version.js ${{ parameters.NightlyFlag }}
name: Version

View File

@ -1,48 +0,0 @@
steps:
# Linux Specific
- script: |
sudo apt-get update
sudo apt-get install -y build-essential ca-certificates xvfb fakeroot git rpm libsecret-1-dev libx11-dev libxkbfile-dev xz-utils xorriso zsync libxss1 libgtk-3-0 libasound2 libicu-dev software-properties-common wget dpkg
# clang 9 is included in the image
clang -v
displayName: Install apt dependencies
condition: eq(variables['Agent.OS'], 'Linux')
- script: sudo /sbin/start-stop-daemon --start --quiet --pidfile /tmp/custom_xvfb_99.pid --make-pidfile --background --exec /usr/bin/Xvfb -- :99 -ac -screen 0 1280x1024x16
displayName: Start Xvfb
condition: eq(variables['Agent.OS'], 'Linux')
# Common
- pwsh: |
if ($env:BUILD_ARCH -eq "x86") {
echo "##vso[task.setvariable variable=IsWinX86]true"
}
displayName: Set "IsWinX86" based on "BUILD_ARCH"
# Convert "BUILD_ARCH" to a boolean ("IsWinX86") for the following NodeTool task.
# Templates evaluate too early for the matrix variable "BUILD_ARCH" to be available in ${{ template expressions }}.
# Scripts are interpreted at runtime, so "BUILD_ARCH" will be available to this script, and we can set "IsWinX86".
- task: NodeTool@0
inputs:
versionSpec: 12.18.3
force32bit: $(IsWinX86)
displayName: Install Node.js 12.18.3
- script: npm install --global npm@6.14.8
displayName: Update npm
- pwsh: |
cd script/vsts
npm install
env:
GITHUB_TOKEN: $(GITHUB_TOKEN)
displayName: Install script/vsts dependencies
condition: or(eq(variables['Agent.OS'], 'Windows_NT'), eq(variables['IsReleaseBranch'], 'true'), eq(variables['IsSignedZipBranch'], true), eq(variables['IsNightlyBranch'], 'true'))
# Windows Specific
- task: UsePythonVersion@0
inputs:
versionSpec: '3.8'
condition: eq(variables['Agent.OS'], 'Windows_NT')

View File

@ -1,21 +0,0 @@
parameters:
- name: artifacts
type: object
# artifacts is an array with each element having these properties:
# - fileName
# - fileDir
# - condition
# - continueOnError
steps:
- ${{ each artifact in parameters.artifacts }}:
- task: PublishBuildArtifacts@1
inputs:
PathtoPublish: ${{artifact.fileDir}}/${{artifact.fileName}}
ArtifactName: ${{artifact.fileName}}
ArtifactType: Container
displayName: Upload ${{artifact.fileName}}
${{ if artifact.condition }}:
condition: ${{artifact.condition}}
${{ if artifact.continueOnError }}:
continueOnError: ${{artifact.continueOnError}}

View File

@ -1,79 +0,0 @@
steps:
- pwsh: |
# OS specific env variables
if ($env:AGENT_OS -eq "Linux") {
$env:DISPLAY=":99.0"
$env:npm_config_build_from_source="true"
}
# Test
if ($env:AGENT_OS -eq "Darwin") {
osascript -e 'tell application "System Events" to keystroke "x"' # clear screen saver
caffeinate -s './script/test.js' # Run with caffeinate to prevent the screen saver; the script's node shebang is used automatically.
} else {
node ./script/test.js
}
env:
CI: true
CI_PROVIDER: VSTS
ATOM_JASMINE_REPORTER: list
TEST_JUNIT_XML_ROOT: $(Common.TestResultsDirectory)/junit
ATOM_RUN_CORE_TESTS: $(RunCoreTests)
ATOM_RUN_CORE_MAIN_TESTS: $(RunCoreMainTests)
ATOM_RUN_CORE_RENDER_TESTS: $(RunCoreRendererTests)
ATOM_RUN_PACKAGE_TESTS: $(RunPackageTests)
displayName: Run tests
condition: and(succeeded(), ne(variables['Atom.SkipTests'], 'true'))
# Test results
- pwsh: script/postprocess-junit-results --search-folder "$env:TEST_JUNIT_XML_ROOT" --test-results-files "**/*.xml"
env:
TEST_JUNIT_XML_ROOT: $(Common.TestResultsDirectory)/junit
displayName: Post-process test results
condition: ne(variables['Atom.SkipTests'], 'true')
- task: PublishTestResults@2
inputs:
testResultsFormat: JUnit
searchFolder: $(Common.TestResultsDirectory)/junit
testResultsFiles: '**/*.xml'
mergeTestResults: true
testRunTitle: $(Agent.OS) $(BUILD_ARCH)
condition: ne(variables['Atom.SkipTests'], 'true')
# Crash Reports
- pwsh: |
New-Item -Path $env:ARTIFACT_STAGING_DIR/crash-reports -Type Directory -Force
if (($env:AGENT_OS -eq "Windows_NT") -and (Test-Path "$env:TEMP/Atom Crashes")) {
cp "$env:TEMP/Atom Crashes/*.dmp" $env:ARTIFACT_STAGING_DIR/crash-reports
} else {
cp $env:HOME/Library/Logs/DiagnosticReports/*.crash $env:ARTIFACT_STAGING_DIR/crash-reports
}
env:
ARTIFACT_STAGING_DIR: $(Build.ArtifactStagingDirectory)
displayName: Stage Crash Reports
condition: failed()
- task: PublishBuildArtifacts@1
inputs:
PathtoPublish: $(Build.ArtifactStagingDirectory)/crash-reports
ArtifactName: crash-reports.zip
${{ if eq(variables['Agent.OS'], 'Windows_NT') }}:
condition: and(failed(), eq(variables['ATOM_RELEASES_S3_KEY'], ''))
displayName: Publish crash reports on non-release branch
${{ if ne(variables['Agent.OS'], 'Windows_NT') }}:
condition: failed()
displayName: Upload Crash Reports
- script: >
node $(Build.SourcesDirectory)\script\vsts\upload-crash-reports.js --crash-report-path "%ARTIFACT_STAGING_DIR%\crash-reports" --azure-blob-path "vsts-artifacts/%BUILD_ID%/"
env:
ATOM_RELEASES_S3_KEY: $(ATOM_RELEASES_S3_KEY)
ATOM_RELEASES_AZURE_CONN_STRING: $(ATOM_RELEASES_AZURE_CONN_STRING)
ATOM_RELEASES_S3_SECRET: $(ATOM_RELEASES_S3_SECRET)
ATOM_RELEASES_S3_BUCKET: $(ATOM_RELEASES_S3_BUCKET)
ARTIFACT_STAGING_DIR: $(Build.ArtifactStagingDirectory)
BUILD_ID: $(Build.BuildId)
displayName: Upload crash reports to S3 on release branch
condition: and(failed(), ne(variables['ATOM_RELEASES_S3_KEY'], ''), eq(variables['Agent.OS'], 'Windows_NT'))

View File

@ -1,119 +0,0 @@
jobs:
- job: Windows_build
displayName: Windows Build
dependsOn: GetReleaseVersion
timeoutInMinutes: 180
strategy:
maxParallel: 2
matrix:
x64:
BUILD_ARCH: x64
RunCoreMainTests: true
x86:
BUILD_ARCH: x86
RunCoreMainTests: true
pool:
vmImage: windows-2019
variables:
AppName: $[ dependencies.GetReleaseVersion.outputs['Version.AppName'] ]
ReleaseVersion: $[ dependencies.GetReleaseVersion.outputs['Version.ReleaseVersion'] ]
IsReleaseBranch: $[ dependencies.GetReleaseVersion.outputs['Version.IsReleaseBranch'] ]
IsSignedZipBranch: $[ dependencies.GetReleaseVersion.outputs['Version.IsSignedZipBranch'] ]
steps:
- template: templates/preparation.yml
- template: templates/cache.yml
parameters:
OS: windows
- template: templates/bootstrap.yml
- template: templates/build.yml
- template: templates/test.yml
- pwsh: |
if ($env:BUILD_ARCH -eq "x64") {
$env:FileID="-x64"
} else {
$env:FileID=""
}
echo "##vso[task.setvariable variable=FileID]$env:FileID" # Azure syntax
displayName: Set FileID based on the arch
- template: templates/publish.yml
parameters:
artifacts:
- fileName: atom$(FileID)-windows.zip
fileDir: $(Build.SourcesDirectory)/out
condition: and( succeeded(), or( eq(variables['BUILD_ARCH'], 'x64'), ne(variables['Build.Reason'], 'PullRequest') ) )
- fileName: AtomSetup$(FileID).exe
fileDir: $(Build.SourcesDirectory)/out
condition: and(succeeded(), eq(variables['IsReleaseBranch'], 'true'))
- fileName: $(AppName)$(FileID)-$(ReleaseVersion)-full.nupkg
fileDir: $(Build.SourcesDirectory)/out
condition: and(succeeded(), eq(variables['IsReleaseBranch'], 'true'))
- fileName: $(AppName)$(FileID)-$(ReleaseVersion)-delta.nupkg
fileDir: $(Build.SourcesDirectory)/out
condition: and(succeeded(), eq(variables['IsReleaseBranch'], 'true'))
continueOnError: true # Nightly builds don't produce delta packages yet, so don't fail the build
- fileName: RELEASES$(FileID)
fileDir: $(Build.SourcesDirectory)/out
condition: and(succeeded(), eq(variables['IsReleaseBranch'], 'true'))
- job: Windows_tests
displayName: Windows Tests
dependsOn: Windows_build
timeoutInMinutes: 180
strategy:
maxParallel: 2
matrix:
x64_Renderer_Test1:
RunCoreMainTests: false
RunCoreRendererTests: 1
BUILD_ARCH: x64
os: windows-2019
x64_Renderer_Test2:
RunCoreMainTests: false
RunCoreRendererTests: 2
BUILD_ARCH: x64
os: windows-2019
pool:
vmImage: $(os)
variables:
AppName: $[ dependencies.GetReleaseVersion.outputs['Version.AppName'] ]
ReleaseVersion: $[ dependencies.GetReleaseVersion.outputs['Version.ReleaseVersion'] ]
IsReleaseBranch: $[ dependencies.GetReleaseVersion.outputs['Version.IsReleaseBranch'] ]
IsSignedZipBranch: $[ dependencies.GetReleaseVersion.outputs['Version.IsSignedZipBranch'] ]
steps:
- template: templates/preparation.yml
- template: templates/cache.yml
parameters:
OS: windows
- template: templates/bootstrap.yml
# Downloading the build artifacts
- pwsh: |
if ($env:BUILD_ARCH -eq "x64") {
$env:FileID="-x64"
} else {
$env:FileID=""
}
echo "##vso[task.setvariable variable=FileID]$env:FileID" # Azure syntax
displayName: Set FileID based on the arch
- template: templates/download-unzip.yml
parameters:
artifacts:
- atom$(FileID)-windows.zip
# tests
- template: templates/test.yml

View File

@ -1,13 +0,0 @@
trigger: none # No CI builds, only PR builds
jobs:
# GetReleaseVersion
- template: platforms/templates/get-release-version.yml
# Import lint definition
- template: lint.yml
# Import OS-specific build definitions
- template: platforms/windows.yml
- template: platforms/macos.yml
- template: platforms/linux.yml

View File

@ -1,73 +0,0 @@
trigger:
- master
- 1.* # VSTS only supports wildcards at the end
- electron-*
pr: none # no PR triggers
# workaround for https://bit.ly/2CK8itc
variables:
_ATOM_RELEASES_S3_KEY: $[ variables.ATOM_RELEASES_S3_KEY ]
_ATOM_RELEASES_S3_SECRET: $[ variables.ATOM_RELEASES_S3_SECRET ]
_ATOM_RELEASES_S3_BUCKET: $[ variables.ATOM_RELEASES_S3_BUCKET ]
_PACKAGE_CLOUD_API_KEY: $[ variables.PACKAGE_CLOUD_API_KEY ]
jobs:
# GetReleaseVersion
- template: platforms/templates/get-release-version.yml
# Import lint definition
- template: lint.yml
# Import OS-specific build definitions.
- template: platforms/windows.yml
- template: platforms/macos.yml
- template: platforms/linux.yml
- job: UploadArtifacts
pool:
vmImage: 'ubuntu-latest'
dependsOn:
- GetReleaseVersion
- Lint
- Windows_tests
- Linux
- macOS_tests
variables:
ReleaseVersion: $[ dependencies.GetReleaseVersion.outputs['Version.ReleaseVersion'] ]
IsReleaseBranch: $[ dependencies.GetReleaseVersion.outputs['Version.IsReleaseBranch'] ]
IsSignedZipBranch: $[ dependencies.GetReleaseVersion.outputs['Version.IsSignedZipBranch'] ]
steps:
- template: platforms/templates/preparation.yml
- task: DownloadBuildArtifacts@0
inputs:
itemPattern: '**'
downloadType: 'specific'
displayName: Download Release Artifacts
- script: |
node $(Build.SourcesDirectory)/script/vsts/upload-artifacts.js --create-github-release --assets-path "$(System.ArtifactsDirectory)" --linux-repo-name "atom-staging"
env:
GITHUB_TOKEN: $(GITHUB_TOKEN)
ATOM_RELEASE_VERSION: $(ReleaseVersion)
ATOM_RELEASES_S3_KEY: $(ATOM_RELEASES_S3_KEY)
ATOM_RELEASES_AZURE_CONN_STRING: $(ATOM_RELEASES_AZURE_CONN_STRING)
ATOM_RELEASES_S3_SECRET: $(ATOM_RELEASES_S3_SECRET)
ATOM_RELEASES_S3_BUCKET: $(ATOM_RELEASES_S3_BUCKET)
PACKAGE_CLOUD_API_KEY: $(PACKAGE_CLOUD_API_KEY)
displayName: Create Draft Release
condition: and(succeeded(), eq(variables['Atom.AutoDraftRelease'], 'true'), eq(variables['IsReleaseBranch'], 'true'))
- script: |
node $(Build.SourcesDirectory)/script/vsts/upload-artifacts.js --assets-path "$(System.ArtifactsDirectory)" --azure-blob-path "vsts-artifacts/$(Build.BuildId)/"
env:
ATOM_RELEASE_VERSION: $(ReleaseVersion)
ATOM_RELEASES_S3_KEY: $(ATOM_RELEASES_S3_KEY)
ATOM_RELEASES_AZURE_CONN_STRING: $(ATOM_RELEASES_AZURE_CONN_STRING)
ATOM_RELEASES_S3_SECRET: $(ATOM_RELEASES_S3_SECRET)
ATOM_RELEASES_S3_BUCKET: $(ATOM_RELEASES_S3_BUCKET)
displayName: Upload CI Artifacts to S3
condition: and(succeeded(), eq(variables['IsSignedZipBranch'], 'true'))

View File

@ -1,187 +0,0 @@
'use strict';
const fs = require('fs');
const os = require('os');
const path = require('path');
const glob = require('glob');
const spawnSync = require('../lib/spawn-sync');
const publishRelease = require('publish-release');
const releaseNotes = require('./lib/release-notes');
const uploadToAzure = require('./lib/upload-to-azure-blob');
const uploadLinuxPackages = require('./lib/upload-linux-packages');
const CONFIG = require('../config');
const { REPO_OWNER, MAIN_REPO, NIGHTLY_RELEASE_REPO } = CONFIG;
const yargs = require('yargs');
const argv = yargs
.usage('Usage: $0 [options]')
.help('help')
.describe(
'assets-path',
'Path to the folder where all release assets are stored'
)
.describe(
'azure-blob-path',
'Indicates the Azure Blob Storage path in which the assets should be uploaded'
)
.describe(
'create-github-release',
'Creates a GitHub release for this build, draft if release branch or public if Nightly'
)
.describe(
'linux-repo-name',
'If specified, uploads Linux packages to the given repo name on packagecloud'
)
.wrap(yargs.terminalWidth()).argv;
const releaseVersion = CONFIG.computedAppVersion;
const isNightlyRelease = CONFIG.channel === 'nightly';
const assetsPath = argv.assetsPath || CONFIG.buildOutputPath;
const assetsPattern =
'/**/*(*.exe|*.zip|*.nupkg|*.tar.gz|*.rpm|*.deb|RELEASES*|pulsar-api.json)';
const assets = glob.sync(assetsPattern, { root: assetsPath, nodir: true });
const azureBlobPath = argv.azureBlobPath || `releases/v${releaseVersion}/`;
if (!assets || assets.length === 0) {
console.error(`No assets found under specified path: ${assetsPath}`);
process.exit(1);
}
async function uploadArtifacts() {
let releaseForVersion = await releaseNotes.getRelease(
releaseVersion,
process.env.GITHUB_TOKEN
);
if (releaseForVersion.exists && !releaseForVersion.isDraft) {
console.log(
`Published release already exists for ${releaseVersion}, skipping upload.`
);
return;
}
if (
process.env.ATOM_RELEASES_S3_KEY &&
process.env.ATOM_RELEASES_S3_SECRET &&
process.env.ATOM_RELEASES_S3_BUCKET
) {
console.log(
`Uploading ${
assets.length
} release assets for ${releaseVersion} to Azure Blob Storage under '${azureBlobPath}'`
);
await uploadToAzure(
process.env.ATOM_RELEASES_AZURE_CONN_STRING,
azureBlobPath,
assets
);
} else {
console.log(
'\nEnvironment variables "ATOM_RELEASES_S3_BUCKET", "ATOM_RELEASES_S3_KEY" and/or "ATOM_RELEASES_S3_SECRET" are not set, skipping S3 upload.'
);
}
if (argv.linuxRepoName) {
if (process.env.PACKAGE_CLOUD_API_KEY) {
await uploadLinuxPackages(
argv.linuxRepoName,
process.env.PACKAGE_CLOUD_API_KEY,
releaseVersion,
assets
);
} else {
console.log(
'\nEnvironment variable "PACKAGE_CLOUD_API_KEY" is not set, skipping PackageCloud upload.'
);
}
} else {
console.log(
'\nNo Linux package repo name specified, skipping Linux package upload.'
);
}
const oldReleaseNotes = releaseForVersion.releaseNotes;
if (oldReleaseNotes) {
const oldReleaseNotesPath = path.resolve(
os.tmpdir(),
'OLD_RELEASE_NOTES.md'
);
console.log(
`Saving existing ${releaseVersion} release notes to ${oldReleaseNotesPath}`
);
fs.writeFileSync(oldReleaseNotesPath, oldReleaseNotes, 'utf8');
// This line instructs VSTS to upload the file as an artifact
console.log(
`##vso[artifact.upload containerfolder=OldReleaseNotes;artifactname=OldReleaseNotes;]${oldReleaseNotesPath}`
);
}
if (argv.createGithubRelease) {
console.log(`\nGenerating new release notes for ${releaseVersion}`);
let newReleaseNotes = '';
if (isNightlyRelease) {
newReleaseNotes = await releaseNotes.generateForNightly(
releaseVersion,
process.env.GITHUB_TOKEN,
oldReleaseNotes
);
} else {
newReleaseNotes = await releaseNotes.generateForVersion(
releaseVersion,
process.env.GITHUB_TOKEN,
oldReleaseNotes
);
}
console.log(`New release notes:\n\n${newReleaseNotes}`);
const releaseSha = !isNightlyRelease
? spawnSync('git', ['rev-parse', 'HEAD'])
.stdout.toString()
.trimEnd()
: 'master'; // Nightly tags are created in REPO_OWNER/NIGHTLY_RELEASE_REPO so the SHA is irrelevant
console.log(`Creating GitHub release v${releaseVersion}`);
const release = await publishReleaseAsync({
token: process.env.GITHUB_TOKEN,
owner: REPO_OWNER,
repo: !isNightlyRelease ? MAIN_REPO : NIGHTLY_RELEASE_REPO,
name: CONFIG.computedAppVersion,
notes: newReleaseNotes,
target_commitish: releaseSha,
tag: `v${CONFIG.computedAppVersion}`,
draft: !isNightlyRelease,
prerelease: CONFIG.channel !== 'stable',
editRelease: true,
reuseRelease: true,
skipIfPublished: true,
assets
});
console.log('Release published successfully: ', release.html_url);
} else {
console.log('Skipping GitHub release creation');
}
}
async function publishReleaseAsync(options) {
return new Promise((resolve, reject) => {
publishRelease(options, (err, release) => {
if (err) {
reject(err);
} else {
resolve(release);
}
});
});
}
// Wrap the call to the async function and catch errors from its promise because
// Node.js doesn't yet allow use of await at the script scope
uploadArtifacts().catch(err => {
console.error('An error occurred while uploading the release:\n\n', err);
process.exit(1);
});

View File

@ -1,56 +0,0 @@
'use strict';
const glob = require('glob');
const uploadToAzure = require('./lib/upload-to-azure-blob');
const yargs = require('yargs');
const argv = yargs
.usage('Usage: $0 [options]')
.help('help')
.describe(
'crash-report-path',
'The local path of a directory containing crash reports to upload'
)
.describe(
'azure-blob-path',
'Indicates the azure blob storage path in which the crash reports should be uploaded'
)
.wrap(yargs.terminalWidth()).argv;
async function uploadCrashReports() {
const crashesPath = argv.crashReportPath;
const crashes = glob.sync('/*.dmp', { root: crashesPath });
const azureBlobPath = argv.azureBlobPath;
if (crashes && crashes.length > 0) {
console.log(
`Uploading ${
crashes.length
} private crash reports to Azure Blob Storage under '${azureBlobPath}'`
);
await uploadToAzure(
process.env.ATOM_RELEASES_AZURE_CONN_STRING,
azureBlobPath,
crashes,
'private'
);
}
}
if (
process.env.ATOM_RELEASES_S3_KEY &&
process.env.ATOM_RELEASES_S3_SECRET &&
process.env.ATOM_RELEASES_S3_BUCKET
) {
// Wrap the call to the async function and catch errors from its promise because
// Node.js doesn't yet allow use of await at the script scope
uploadCrashReports().catch(err => {
console.error('An error occurred while uploading crash reports:\n\n', err);
process.exit(1);
});
} else {
console.log(
'\n\nEnvironment variables "ATOM_RELEASES_S3_BUCKET", "ATOM_RELEASES_S3_KEY" and/or "ATOM_RELEASES_S3_SECRET" are not set, skipping S3 upload.'
);
}

View File

@ -1 +0,0 @@
x64

View File

@ -1 +0,0 @@
x86