mirror of
https://github.com/pulsar-edit/pulsar.git
synced 2024-09-19 06:58:26 +03:00
Completely removed the old "lib" directory inside scripts
This commit is contained in:
parent
2bb62f84b1
commit
091e10e8c4
@ -1,54 +0,0 @@
|
||||
const fs = require('fs-extra');
|
||||
const path = require('path');
|
||||
|
||||
module.exports = function(packagePath) {
|
||||
const nodeModulesPath = path.join(packagePath, 'node_modules');
|
||||
const nodeModulesBackupPath = path.join(packagePath, 'node_modules.bak');
|
||||
|
||||
if (fs.existsSync(nodeModulesBackupPath)) {
|
||||
throw new Error(
|
||||
'Cannot back up ' +
|
||||
nodeModulesPath +
|
||||
'; ' +
|
||||
nodeModulesBackupPath +
|
||||
' already exists'
|
||||
);
|
||||
}
|
||||
|
||||
// some packages may have no node_modules after deduping, but we still want
|
||||
// to "back-up" and later restore that fact
|
||||
if (!fs.existsSync(nodeModulesPath)) {
|
||||
const msg =
|
||||
'Skipping backing up ' + nodeModulesPath + ' as it does not exist';
|
||||
console.log(msg.gray);
|
||||
|
||||
const restore = function stubRestoreNodeModules() {
|
||||
if (fs.existsSync(nodeModulesPath)) {
|
||||
fs.removeSync(nodeModulesPath);
|
||||
}
|
||||
};
|
||||
|
||||
return { restore, nodeModulesPath, nodeModulesBackupPath };
|
||||
}
|
||||
|
||||
fs.copySync(nodeModulesPath, nodeModulesBackupPath);
|
||||
|
||||
const restore = function restoreNodeModules() {
|
||||
if (!fs.existsSync(nodeModulesBackupPath)) {
|
||||
throw new Error(
|
||||
'Cannot restore ' +
|
||||
nodeModulesPath +
|
||||
'; ' +
|
||||
nodeModulesBackupPath +
|
||||
' does not exist'
|
||||
);
|
||||
}
|
||||
|
||||
if (fs.existsSync(nodeModulesPath)) {
|
||||
fs.removeSync(nodeModulesPath);
|
||||
}
|
||||
fs.renameSync(nodeModulesBackupPath, nodeModulesPath);
|
||||
};
|
||||
|
||||
return { restore, nodeModulesPath, nodeModulesBackupPath };
|
||||
};
|
@ -1,46 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const buildMetadata = require('../package.json');
|
||||
const semver = require('semver');
|
||||
const chromedriverMetadataPath = require('electron-chromedriver/package.json');
|
||||
const mksnapshotMetadataPath = require('electron-mksnapshot/package.json');
|
||||
|
||||
// The enviroment variable is usually set in install-script-dependencies.js
|
||||
const majorElectronVersion = semver.major(
|
||||
process.env.ELECTRON_CUSTOM_VERSION ||
|
||||
require('../config').appMetadata.electronVersion
|
||||
);
|
||||
|
||||
module.exports = function() {
|
||||
// Chromedriver should be at least v9.0.0
|
||||
// Mksnapshot should be at least v9.0.2
|
||||
const chromedriverVer = buildMetadata.dependencies['electron-chromedriver'];
|
||||
const mksnapshotVer = buildMetadata.dependencies['electron-mksnapshot'];
|
||||
const chromedriverActualVer = chromedriverMetadataPath.version;
|
||||
const mksnapshotActualVer = mksnapshotMetadataPath.version;
|
||||
|
||||
// Always use caret on electron-chromedriver so that it can pick up the best minor/patch versions
|
||||
if (!chromedriverVer.startsWith('^')) {
|
||||
throw new Error(
|
||||
`electron-chromedriver version in script/package.json should start with a caret to match latest patch version.`
|
||||
);
|
||||
}
|
||||
|
||||
if (!mksnapshotVer.startsWith('^')) {
|
||||
throw new Error(
|
||||
`electron-mksnapshot version in script/package.json should start with a caret to match latest patch version.`
|
||||
);
|
||||
}
|
||||
|
||||
if (!semver.satisfies(chromedriverActualVer, `>=${majorElectronVersion}`)) {
|
||||
throw new Error(
|
||||
`electron-chromedriver should be at least v${majorElectronVersion} to support the ELECTRON_CUSTOM_VERSION environment variable.`
|
||||
);
|
||||
}
|
||||
|
||||
if (!semver.satisfies(mksnapshotActualVer, `>=${majorElectronVersion}`)) {
|
||||
throw new Error(
|
||||
`electron-mksnapshot should be at least v${majorElectronVersion} to support the ELECTRON_CUSTOM_VERSION environment variable.`
|
||||
);
|
||||
}
|
||||
};
|
@ -1,30 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs-extra');
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
|
||||
const CONFIG = require('../config');
|
||||
|
||||
module.exports = function() {
|
||||
const cachePaths = [
|
||||
path.join(CONFIG.repositoryRootPath, 'electron'),
|
||||
path.join(CONFIG.atomHomeDirPath, '.node-gyp'),
|
||||
path.join(CONFIG.atomHomeDirPath, 'storage'),
|
||||
path.join(CONFIG.atomHomeDirPath, '.apm'),
|
||||
path.join(CONFIG.atomHomeDirPath, '.npm'),
|
||||
path.join(CONFIG.atomHomeDirPath, 'compile-cache'),
|
||||
path.join(CONFIG.atomHomeDirPath, 'snapshot-cache'),
|
||||
path.join(CONFIG.atomHomeDirPath, 'pulsar-shell'),
|
||||
path.join(CONFIG.atomHomeDirPath, 'electron'),
|
||||
path.join(os.tmpdir(), 'pulsar-build'),
|
||||
path.join(os.tmpdir(), 'pulsar-cached-pulsar-shells')
|
||||
];
|
||||
const rmPromises = [];
|
||||
for (let path of cachePaths) {
|
||||
console.log(`Cleaning ${path}`);
|
||||
rmPromises.push(fs.remove(path));
|
||||
}
|
||||
|
||||
return Promise.all(rmPromises);
|
||||
};
|
@ -1,46 +0,0 @@
|
||||
const path = require('path');
|
||||
|
||||
const CONFIG = require('../config');
|
||||
|
||||
module.exports = function() {
|
||||
// We can't require fs-extra or glob if `script/bootstrap` has never been run,
|
||||
// because they are third party modules. This is okay because cleaning
|
||||
// dependencies only makes sense if dependencies have been installed at least
|
||||
// once.
|
||||
const fs = require('fs-extra');
|
||||
const glob = require('glob');
|
||||
|
||||
const rmPromises = [];
|
||||
|
||||
const apmDependenciesPath = path.join(CONFIG.apmRootPath, 'node_modules');
|
||||
console.log(`Cleaning ${apmDependenciesPath}`);
|
||||
rmPromises.push(fs.remove(apmDependenciesPath));
|
||||
|
||||
const atomDependenciesPath = path.join(
|
||||
CONFIG.repositoryRootPath,
|
||||
'node_modules'
|
||||
);
|
||||
console.log(`Cleaning ${atomDependenciesPath}`);
|
||||
rmPromises.push(fs.remove(atomDependenciesPath));
|
||||
|
||||
const scriptDependenciesPath = path.join(
|
||||
CONFIG.scriptRootPath,
|
||||
'node_modules'
|
||||
);
|
||||
console.log(`Cleaning ${scriptDependenciesPath}`);
|
||||
rmPromises.push(fs.remove(scriptDependenciesPath));
|
||||
|
||||
const bundledPackageDependenciesPaths = path.join(
|
||||
CONFIG.repositoryRootPath,
|
||||
'packages',
|
||||
'**',
|
||||
'node_modules'
|
||||
);
|
||||
for (const bundledPackageDependencyPath of glob.sync(
|
||||
bundledPackageDependenciesPaths
|
||||
)) {
|
||||
rmPromises.push(fs.remove(bundledPackageDependencyPath));
|
||||
}
|
||||
|
||||
return Promise.all(rmPromises);
|
||||
};
|
@ -1,10 +0,0 @@
|
||||
const fs = require('fs-extra');
|
||||
const CONFIG = require('../config');
|
||||
|
||||
module.exports = function() {
|
||||
if (fs.existsSync(CONFIG.buildOutputPath)) {
|
||||
console.log(`Cleaning ${CONFIG.buildOutputPath}`);
|
||||
return fs.remove(CONFIG.buildOutputPath);
|
||||
}
|
||||
return Promise.resolve();
|
||||
};
|
@ -1,153 +0,0 @@
|
||||
const downloadFileFromGithub = require('./download-file-from-github');
|
||||
const CONFIG = require('../config');
|
||||
const fs = require('fs-extra');
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
const spawnSync = require('./spawn-sync');
|
||||
const osxSign = require('electron-osx-sign');
|
||||
const macEntitlementsPath = path.join(
|
||||
CONFIG.repositoryRootPath,
|
||||
'resources',
|
||||
'mac',
|
||||
'entitlements.plist'
|
||||
);
|
||||
|
||||
module.exports = async function(packagedAppPath) {
|
||||
if (
|
||||
!process.env.ATOM_MAC_CODE_SIGNING_CERT_DOWNLOAD_URL &&
|
||||
!process.env.ATOM_MAC_CODE_SIGNING_CERT_PATH
|
||||
) {
|
||||
console.log(
|
||||
'Skipping code signing because the ATOM_MAC_CODE_SIGNING_CERT_DOWNLOAD_URL environment variable is not defined'
|
||||
.gray
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
let certPath = process.env.ATOM_MAC_CODE_SIGNING_CERT_PATH;
|
||||
if (!certPath) {
|
||||
certPath = path.join(os.tmpdir(), 'mac.p12');
|
||||
downloadFileFromGithub(
|
||||
process.env.ATOM_MAC_CODE_SIGNING_CERT_DOWNLOAD_URL,
|
||||
certPath
|
||||
);
|
||||
}
|
||||
try {
|
||||
console.log(
|
||||
`Ensuring keychain ${process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN} exists`
|
||||
);
|
||||
try {
|
||||
spawnSync(
|
||||
'security',
|
||||
['show-keychain-info', process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN],
|
||||
{ stdio: 'inherit' }
|
||||
);
|
||||
} catch (err) {
|
||||
console.log(
|
||||
`Creating keychain ${process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN}`
|
||||
);
|
||||
// The keychain doesn't exist, try to create it
|
||||
spawnSync(
|
||||
'security',
|
||||
[
|
||||
'create-keychain',
|
||||
'-p',
|
||||
process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN_PASSWORD,
|
||||
process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN
|
||||
],
|
||||
{ stdio: 'inherit' }
|
||||
);
|
||||
|
||||
// List the keychain to "activate" it. Somehow this seems
|
||||
// to be needed otherwise the signing operation fails
|
||||
spawnSync(
|
||||
'security',
|
||||
['list-keychains', '-s', process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN],
|
||||
{ stdio: 'inherit' }
|
||||
);
|
||||
|
||||
// Make sure it doesn't time out before we use it
|
||||
spawnSync(
|
||||
'security',
|
||||
[
|
||||
'set-keychain-settings',
|
||||
'-t',
|
||||
'3600',
|
||||
'-u',
|
||||
process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN
|
||||
],
|
||||
{ stdio: 'inherit' }
|
||||
);
|
||||
}
|
||||
|
||||
console.log(
|
||||
`Unlocking keychain ${process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN}`
|
||||
);
|
||||
const unlockArgs = ['unlock-keychain'];
|
||||
// For signing on local workstations, password could be entered interactively
|
||||
if (process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN_PASSWORD) {
|
||||
unlockArgs.push(
|
||||
'-p',
|
||||
process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN_PASSWORD
|
||||
);
|
||||
}
|
||||
unlockArgs.push(process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN);
|
||||
spawnSync('security', unlockArgs, { stdio: 'inherit' });
|
||||
|
||||
console.log(
|
||||
`Importing certificate at ${certPath} into ${
|
||||
process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN
|
||||
} keychain`
|
||||
);
|
||||
spawnSync('security', [
|
||||
'import',
|
||||
certPath,
|
||||
'-P',
|
||||
process.env.ATOM_MAC_CODE_SIGNING_CERT_PASSWORD,
|
||||
'-k',
|
||||
process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN,
|
||||
'-T',
|
||||
'/usr/bin/codesign'
|
||||
]);
|
||||
|
||||
console.log(
|
||||
'Running incantation to suppress dialog when signing on macOS Sierra'
|
||||
);
|
||||
try {
|
||||
spawnSync('security', [
|
||||
'set-key-partition-list',
|
||||
'-S',
|
||||
'apple-tool:,apple:',
|
||||
'-s',
|
||||
'-k',
|
||||
process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN_PASSWORD,
|
||||
process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN
|
||||
]);
|
||||
} catch (e) {
|
||||
console.log("Incantation failed... maybe this isn't Sierra?");
|
||||
}
|
||||
|
||||
console.log(`Code-signing application at ${packagedAppPath}`);
|
||||
|
||||
try {
|
||||
await osxSign.signAsync({
|
||||
app: packagedAppPath,
|
||||
entitlements: macEntitlementsPath,
|
||||
'entitlements-inherit': macEntitlementsPath,
|
||||
identity: 'Developer ID Application: GitHub',
|
||||
keychain: process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN,
|
||||
platform: 'darwin',
|
||||
hardenedRuntime: true
|
||||
});
|
||||
console.info('Application signing complete');
|
||||
} catch (err) {
|
||||
console.error('Applicaiton singing failed');
|
||||
console.error(err);
|
||||
}
|
||||
} finally {
|
||||
if (!process.env.ATOM_MAC_CODE_SIGNING_CERT_PATH) {
|
||||
console.log(`Deleting certificate at ${certPath}`);
|
||||
fs.removeSync(certPath);
|
||||
}
|
||||
}
|
||||
};
|
@ -1,72 +0,0 @@
|
||||
const downloadFileFromGithub = require('./download-file-from-github');
|
||||
const fs = require('fs-extra');
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
const { spawnSync } = require('child_process');
|
||||
|
||||
module.exports = function(filesToSign) {
|
||||
if (
|
||||
!process.env.ATOM_WIN_CODE_SIGNING_CERT_DOWNLOAD_URL &&
|
||||
!process.env.ATOM_WIN_CODE_SIGNING_CERT_PATH
|
||||
) {
|
||||
console.log(
|
||||
'Skipping code signing because the ATOM_WIN_CODE_SIGNING_CERT_DOWNLOAD_URL environment variable is not defined'
|
||||
.gray
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
let certPath = process.env.ATOM_WIN_CODE_SIGNING_CERT_PATH;
|
||||
if (!certPath) {
|
||||
certPath = path.join(os.tmpdir(), 'win.p12');
|
||||
downloadFileFromGithub(
|
||||
process.env.ATOM_WIN_CODE_SIGNING_CERT_DOWNLOAD_URL,
|
||||
certPath
|
||||
);
|
||||
}
|
||||
try {
|
||||
for (const fileToSign of filesToSign) {
|
||||
console.log(`Code-signing executable at ${fileToSign}`);
|
||||
signFile(fileToSign);
|
||||
}
|
||||
} finally {
|
||||
if (!process.env.ATOM_WIN_CODE_SIGNING_CERT_PATH) {
|
||||
fs.removeSync(certPath);
|
||||
}
|
||||
}
|
||||
|
||||
function signFile(fileToSign) {
|
||||
const signCommand = path.resolve(
|
||||
__dirname,
|
||||
'..',
|
||||
'node_modules',
|
||||
'@atom',
|
||||
'electron-winstaller',
|
||||
'vendor',
|
||||
'signtool.exe'
|
||||
);
|
||||
const args = [
|
||||
'sign',
|
||||
`/f ${certPath}`, // Signing cert file
|
||||
`/p ${process.env.ATOM_WIN_CODE_SIGNING_CERT_PASSWORD}`, // Signing cert password
|
||||
'/fd sha256', // File digest algorithm
|
||||
'/tr http://timestamp.digicert.com', // Time stamp server
|
||||
'/td sha256', // Times stamp algorithm
|
||||
`"${fileToSign}"`
|
||||
];
|
||||
const result = spawnSync(signCommand, args, {
|
||||
stdio: 'inherit',
|
||||
shell: true
|
||||
});
|
||||
if (result.status !== 0) {
|
||||
// Ensure we do not dump the signing password into the logs if something goes wrong
|
||||
throw new Error(
|
||||
`Command ${signCommand} ${args
|
||||
.map(a =>
|
||||
a.replace(process.env.ATOM_WIN_CODE_SIGNING_CERT_PASSWORD, '******')
|
||||
)
|
||||
.join(' ')} exited with code ${result.status}`
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
@ -1,68 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs-extra');
|
||||
const path = require('path');
|
||||
const spawnSync = require('./spawn-sync');
|
||||
const { path7za } = require('7zip-bin');
|
||||
|
||||
const CONFIG = require('../config');
|
||||
|
||||
module.exports = function(packagedAppPath) {
|
||||
const appArchivePath = path.join(CONFIG.buildOutputPath, getArchiveName());
|
||||
compress(packagedAppPath, appArchivePath);
|
||||
|
||||
if (process.platform === 'darwin') {
|
||||
const symbolsArchivePath = path.join(
|
||||
CONFIG.buildOutputPath,
|
||||
'pulsar-mac-symbols.zip'
|
||||
);
|
||||
compress(CONFIG.symbolsPath, symbolsArchivePath);
|
||||
}
|
||||
};
|
||||
|
||||
function getArchiveName() {
|
||||
switch (process.platform) {
|
||||
case 'darwin':
|
||||
return 'pulsar-mac.zip';
|
||||
case 'win32':
|
||||
return `pulsar-${process.arch === 'x64' ? 'x64-' : ''}windows.zip`;
|
||||
default:
|
||||
return `pulsar-${getLinuxArchiveArch()}.tar.gz`;
|
||||
}
|
||||
}
|
||||
|
||||
function getLinuxArchiveArch() {
|
||||
switch (process.arch) {
|
||||
case 'ia32':
|
||||
return 'i386';
|
||||
case 'x64':
|
||||
return 'amd64';
|
||||
default:
|
||||
return process.arch;
|
||||
}
|
||||
}
|
||||
|
||||
function compress(inputDirPath, outputArchivePath) {
|
||||
if (fs.existsSync(outputArchivePath)) {
|
||||
console.log(`Deleting "${outputArchivePath}"`);
|
||||
fs.removeSync(outputArchivePath);
|
||||
}
|
||||
|
||||
console.log(`Compressing "${inputDirPath}" to "${outputArchivePath}"`);
|
||||
let compressCommand, compressArguments;
|
||||
if (process.platform === 'darwin') {
|
||||
compressCommand = 'zip';
|
||||
compressArguments = ['-r', '--symlinks'];
|
||||
} else if (process.platform === 'win32') {
|
||||
compressCommand = path7za;
|
||||
compressArguments = ['a', '-r'];
|
||||
} else {
|
||||
compressCommand = 'tar';
|
||||
compressArguments = ['caf'];
|
||||
}
|
||||
compressArguments.push(outputArchivePath, path.basename(inputDirPath));
|
||||
spawnSync(compressCommand, compressArguments, {
|
||||
cwd: path.dirname(inputDirPath),
|
||||
maxBuffer: 2024 * 2024
|
||||
});
|
||||
}
|
@ -1,81 +0,0 @@
|
||||
// This module exports a function that copies all the static assets into the
|
||||
// appropriate location in the build output directory.
|
||||
|
||||
'use strict';
|
||||
|
||||
const path = require('path');
|
||||
const fs = require('fs-extra');
|
||||
const CONFIG = require('../config');
|
||||
const glob = require('glob');
|
||||
const includePathInPackagedApp = require('./include-path-in-packaged-app');
|
||||
|
||||
module.exports = function() {
|
||||
console.log(`Copying assets to ${CONFIG.intermediateAppPath}`);
|
||||
let srcPaths = [
|
||||
path.join(CONFIG.repositoryRootPath, 'dot-atom'),
|
||||
path.join(CONFIG.repositoryRootPath, 'exports'),
|
||||
path.join(CONFIG.repositoryRootPath, 'package.json'),
|
||||
path.join(CONFIG.repositoryRootPath, 'static'),
|
||||
path.join(CONFIG.repositoryRootPath, 'src'),
|
||||
path.join(CONFIG.repositoryRootPath, 'vendor')
|
||||
];
|
||||
srcPaths = srcPaths.concat(
|
||||
glob.sync(path.join(CONFIG.repositoryRootPath, 'spec', '*.*'), {
|
||||
ignore: path.join('**', '*-spec.*')
|
||||
})
|
||||
);
|
||||
|
||||
const copyPromises = [];
|
||||
for (let srcPath of srcPaths) {
|
||||
copyPromises.push(
|
||||
fs.copy(srcPath, computeDestinationPath(srcPath), {
|
||||
filter: includePathInPackagedApp
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
// Run a copy pass to dereference symlinked directories under node_modules.
|
||||
// We do this to ensure that symlinked repo-local bundled packages get
|
||||
// copied to the output folder correctly. We dereference only the top-level
|
||||
// symlinks and not nested symlinks to avoid issues where symlinked binaries
|
||||
// are duplicated in Pulsar's installation packages (see atom/atom#18490).
|
||||
const nodeModulesPath = path.join(CONFIG.repositoryRootPath, 'node_modules');
|
||||
glob
|
||||
.sync(path.join(nodeModulesPath, '*'))
|
||||
.map(p =>
|
||||
fs.lstatSync(p).isSymbolicLink()
|
||||
? path.resolve(nodeModulesPath, fs.readlinkSync(p))
|
||||
: p
|
||||
)
|
||||
.forEach(modulePath => {
|
||||
const destPath = path.join(
|
||||
CONFIG.intermediateAppPath,
|
||||
'node_modules',
|
||||
path.basename(modulePath)
|
||||
);
|
||||
copyPromises.push(
|
||||
fs.copy(modulePath, destPath, { filter: includePathInPackagedApp })
|
||||
);
|
||||
});
|
||||
|
||||
copyPromises.push(
|
||||
fs.copy(
|
||||
path.join(
|
||||
CONFIG.repositoryRootPath,
|
||||
'resources',
|
||||
'app-icons',
|
||||
CONFIG.channel,
|
||||
'png',
|
||||
'1024.png'
|
||||
),
|
||||
path.join(CONFIG.intermediateAppPath, 'resources', 'pulsar.png')
|
||||
)
|
||||
);
|
||||
|
||||
return Promise.all(copyPromises);
|
||||
};
|
||||
|
||||
function computeDestinationPath(srcPath) {
|
||||
const relativePath = path.relative(CONFIG.repositoryRootPath, srcPath);
|
||||
return path.join(CONFIG.intermediateAppPath, relativePath);
|
||||
}
|
@ -1,234 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs-extra');
|
||||
const os = require('os');
|
||||
const path = require('path');
|
||||
const spawnSync = require('./spawn-sync');
|
||||
const template = require('lodash.template');
|
||||
|
||||
const CONFIG = require('../config');
|
||||
|
||||
module.exports = function(packagedAppPath) {
|
||||
console.log(`Creating Debian package for "${packagedAppPath}"`);
|
||||
const editorExecutableName =
|
||||
CONFIG.channel === 'stable' ? 'pulsar' : `pulsar-${CONFIG.channel}`;
|
||||
const pkgMgrExecutableName =
|
||||
CONFIG.channel === 'stable' ? 'apm' : `apm-${CONFIG.channel}`;
|
||||
const appDescription = CONFIG.appMetadata.description;
|
||||
const appVersion = CONFIG.appMetadata.version;
|
||||
let arch;
|
||||
if (process.arch === 'ia32') {
|
||||
arch = 'i386';
|
||||
} else if (process.arch === 'x64') {
|
||||
arch = 'amd64';
|
||||
} else if (process.arch === 'ppc') {
|
||||
arch = 'powerpc';
|
||||
} else {
|
||||
arch = process.arch;
|
||||
}
|
||||
|
||||
const outputDebianPackageFilePath = path.join(
|
||||
CONFIG.buildOutputPath,
|
||||
`pulsar-${arch}.deb`
|
||||
);
|
||||
const debianPackageDirPath = path.join(
|
||||
os.tmpdir(),
|
||||
path.basename(packagedAppPath)
|
||||
);
|
||||
const debianPackageConfigPath = path.join(debianPackageDirPath, 'DEBIAN');
|
||||
const debianPackageInstallDirPath = path.join(debianPackageDirPath, 'usr');
|
||||
const debianPackageBinDirPath = path.join(debianPackageInstallDirPath, 'bin');
|
||||
const debianPackageShareDirPath = path.join(
|
||||
debianPackageInstallDirPath,
|
||||
'share'
|
||||
);
|
||||
const debianPackageAtomDirPath = path.join(
|
||||
debianPackageShareDirPath,
|
||||
editorExecutableName
|
||||
);
|
||||
const debianPackageApplicationsDirPath = path.join(
|
||||
debianPackageShareDirPath,
|
||||
'applications'
|
||||
);
|
||||
const debianPackageIconsDirPath = path.join(
|
||||
debianPackageShareDirPath,
|
||||
'pixmaps'
|
||||
);
|
||||
const debianPackageDocsDirPath = path.join(
|
||||
debianPackageShareDirPath,
|
||||
'doc',
|
||||
editorExecutableName
|
||||
);
|
||||
|
||||
if (fs.existsSync(debianPackageDirPath)) {
|
||||
console.log(
|
||||
`Deleting existing build dir for Debian package at "${debianPackageDirPath}"`
|
||||
);
|
||||
fs.removeSync(debianPackageDirPath);
|
||||
}
|
||||
if (fs.existsSync(`${debianPackageDirPath}.deb`)) {
|
||||
console.log(
|
||||
`Deleting existing Debian package at "${debianPackageDirPath}.deb"`
|
||||
);
|
||||
fs.removeSync(`${debianPackageDirPath}.deb`);
|
||||
}
|
||||
if (fs.existsSync(debianPackageDirPath)) {
|
||||
console.log(
|
||||
`Deleting existing Debian package at "${outputDebianPackageFilePath}"`
|
||||
);
|
||||
fs.removeSync(debianPackageDirPath);
|
||||
}
|
||||
|
||||
console.log(
|
||||
`Creating Debian package directory structure at "${debianPackageDirPath}"`
|
||||
);
|
||||
fs.mkdirpSync(debianPackageDirPath);
|
||||
fs.mkdirpSync(debianPackageConfigPath);
|
||||
fs.mkdirpSync(debianPackageInstallDirPath);
|
||||
fs.mkdirpSync(debianPackageShareDirPath);
|
||||
fs.mkdirpSync(debianPackageApplicationsDirPath);
|
||||
fs.mkdirpSync(debianPackageIconsDirPath);
|
||||
fs.mkdirpSync(debianPackageDocsDirPath);
|
||||
fs.mkdirpSync(debianPackageBinDirPath);
|
||||
|
||||
console.log(`Copying "${packagedAppPath}" to "${debianPackageAtomDirPath}"`);
|
||||
fs.copySync(packagedAppPath, debianPackageAtomDirPath);
|
||||
fs.chmodSync(debianPackageAtomDirPath, '755');
|
||||
|
||||
console.log(`Copying binaries into "${debianPackageBinDirPath}"`);
|
||||
fs.copySync(
|
||||
path.join(CONFIG.repositoryRootPath, 'pulsar.sh'),
|
||||
path.join(debianPackageBinDirPath, editorExecutableName)
|
||||
);
|
||||
fs.symlinkSync(
|
||||
path.join(
|
||||
'..',
|
||||
'share',
|
||||
editorExecutableName,
|
||||
'resources',
|
||||
'app',
|
||||
'apm',
|
||||
'node_modules',
|
||||
'.bin',
|
||||
'apm'
|
||||
),
|
||||
path.join(debianPackageBinDirPath, pkgMgrExecutableName)
|
||||
);
|
||||
|
||||
fs.chmodSync(path.join(debianPackageAtomDirPath, 'chrome-sandbox'), '4755');
|
||||
|
||||
console.log(`Writing control file into "${debianPackageConfigPath}"`);
|
||||
const packageSizeInKilobytes = spawnSync('du', ['-sk', packagedAppPath])
|
||||
.stdout.toString()
|
||||
.split(/\s+/)[0];
|
||||
const controlFileTemplate = fs.readFileSync(
|
||||
path.join(
|
||||
CONFIG.repositoryRootPath,
|
||||
'resources',
|
||||
'linux',
|
||||
'debian',
|
||||
'control.in'
|
||||
)
|
||||
);
|
||||
const controlFileContents = template(controlFileTemplate)({
|
||||
appFileName: editorExecutableName,
|
||||
version: appVersion,
|
||||
arch: arch,
|
||||
installedSize: packageSizeInKilobytes,
|
||||
description: appDescription
|
||||
});
|
||||
fs.writeFileSync(
|
||||
path.join(debianPackageConfigPath, 'control'),
|
||||
controlFileContents
|
||||
);
|
||||
|
||||
console.log(
|
||||
`Writing desktop entry file into "${debianPackageApplicationsDirPath}"`
|
||||
);
|
||||
const desktopEntryTemplate = fs.readFileSync(
|
||||
path.join(
|
||||
CONFIG.repositoryRootPath,
|
||||
'resources',
|
||||
'linux',
|
||||
'pulsar.desktop.in'
|
||||
)
|
||||
);
|
||||
const desktopEntryContents = template(desktopEntryTemplate)({
|
||||
appName: CONFIG.appName,
|
||||
appFileName: editorExecutableName,
|
||||
description: appDescription,
|
||||
installDir: '/usr',
|
||||
iconPath: editorExecutableName
|
||||
});
|
||||
fs.writeFileSync(
|
||||
path.join(
|
||||
debianPackageApplicationsDirPath,
|
||||
`${editorExecutableName}.desktop`
|
||||
),
|
||||
desktopEntryContents
|
||||
);
|
||||
|
||||
console.log(`Copying icon into "${debianPackageIconsDirPath}"`);
|
||||
fs.copySync(
|
||||
path.join(
|
||||
packagedAppPath,
|
||||
'resources',
|
||||
'app.asar.unpacked',
|
||||
'resources',
|
||||
'pulsar.png'
|
||||
),
|
||||
path.join(debianPackageIconsDirPath, `${editorExecutableName}.png`)
|
||||
);
|
||||
|
||||
console.log(`Copying license into "${debianPackageDocsDirPath}"`);
|
||||
fs.copySync(
|
||||
path.join(packagedAppPath, 'resources', 'LICENSE.md'),
|
||||
path.join(debianPackageDocsDirPath, 'copyright')
|
||||
);
|
||||
|
||||
console.log(
|
||||
`Copying polkit configuration into "${debianPackageShareDirPath}"`
|
||||
);
|
||||
fs.copySync(
|
||||
path.join(CONFIG.repositoryRootPath, 'resources', 'linux', 'pulsar.policy'),
|
||||
path.join(
|
||||
debianPackageShareDirPath,
|
||||
'polkit-1',
|
||||
'actions',
|
||||
`pulsar-${CONFIG.channel}.policy`
|
||||
)
|
||||
);
|
||||
|
||||
console.log(`Generating .deb file from ${debianPackageDirPath}`);
|
||||
|
||||
// don't compress by default to speed up build
|
||||
let compressionLevel = 0;
|
||||
let compressionType = 'none';
|
||||
if (process.env.IS_RELEASE_BRANCH || process.env.IS_SIGNED_ZIP_BRANCH) {
|
||||
compressionLevel = 6;
|
||||
compressionType = 'xz';
|
||||
}
|
||||
// use sudo if available to speed up build
|
||||
let sudoCommand = 'fakeroot';
|
||||
if (process.env.CI || (process.getuid && process.getuid() === 0)) {
|
||||
sudoCommand = 'sudo';
|
||||
}
|
||||
spawnSync(
|
||||
sudoCommand,
|
||||
[
|
||||
'dpkg-deb',
|
||||
`-Z${compressionType}`,
|
||||
`-z${compressionLevel}`,
|
||||
'-b',
|
||||
debianPackageDirPath
|
||||
],
|
||||
{
|
||||
stdio: 'inherit'
|
||||
}
|
||||
);
|
||||
|
||||
console.log(
|
||||
`Copying generated package into "${outputDebianPackageFilePath}"`
|
||||
);
|
||||
fs.copySync(`${debianPackageDirPath}.deb`, outputDebianPackageFilePath);
|
||||
};
|
@ -1,147 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const assert = require('assert');
|
||||
const fs = require('fs-extra');
|
||||
const path = require('path');
|
||||
const spawnSync = require('./spawn-sync');
|
||||
const template = require('lodash.template');
|
||||
|
||||
const CONFIG = require('../config');
|
||||
|
||||
module.exports = function(packagedAppPath) {
|
||||
console.log(`Creating rpm package for "${packagedAppPath}"`);
|
||||
const editorExecutableName =
|
||||
CONFIG.channel === 'stable' ? 'pulsar' : `pulsar-${CONFIG.channel}`;
|
||||
const pkgMgrExecutableName =
|
||||
CONFIG.channel === 'stable' ? 'apm' : `apm-${CONFIG.channel}`;
|
||||
const appName = CONFIG.appName;
|
||||
const appDescription = CONFIG.appMetadata.description;
|
||||
// RPM versions can't have dashes or tildes in them.
|
||||
// (Ref.: https://twiki.cern.ch/twiki/bin/view/Main/RPMAndDebVersioning)
|
||||
const appVersion = CONFIG.appMetadata.version.replace(/-/g, '.');
|
||||
const policyFileName = `pulsar-${CONFIG.channel}.policy`;
|
||||
|
||||
const rpmPackageDirPath = path.join(CONFIG.homeDirPath, 'rpmbuild');
|
||||
const rpmPackageBuildDirPath = path.join(rpmPackageDirPath, 'BUILD');
|
||||
const rpmPackageSourcesDirPath = path.join(rpmPackageDirPath, 'SOURCES');
|
||||
const rpmPackageSpecsDirPath = path.join(rpmPackageDirPath, 'SPECS');
|
||||
const rpmPackageRpmsDirPath = path.join(rpmPackageDirPath, 'RPMS');
|
||||
const rpmPackageApplicationDirPath = path.join(
|
||||
rpmPackageBuildDirPath,
|
||||
appName
|
||||
);
|
||||
const rpmPackageIconsDirPath = path.join(rpmPackageBuildDirPath, 'icons');
|
||||
|
||||
if (fs.existsSync(rpmPackageDirPath)) {
|
||||
console.log(
|
||||
`Deleting existing rpm build directory at "${rpmPackageDirPath}"`
|
||||
);
|
||||
fs.removeSync(rpmPackageDirPath);
|
||||
}
|
||||
|
||||
console.log(
|
||||
`Creating rpm package directory structure at "${rpmPackageDirPath}"`
|
||||
);
|
||||
fs.mkdirpSync(rpmPackageDirPath);
|
||||
fs.mkdirpSync(rpmPackageBuildDirPath);
|
||||
fs.mkdirpSync(rpmPackageSourcesDirPath);
|
||||
fs.mkdirpSync(rpmPackageSpecsDirPath);
|
||||
|
||||
console.log(
|
||||
`Copying "${packagedAppPath}" to "${rpmPackageApplicationDirPath}"`
|
||||
);
|
||||
fs.copySync(packagedAppPath, rpmPackageApplicationDirPath);
|
||||
|
||||
console.log(`Copying icons into "${rpmPackageIconsDirPath}"`);
|
||||
fs.copySync(
|
||||
path.join(
|
||||
CONFIG.repositoryRootPath,
|
||||
'resources',
|
||||
'app-icons',
|
||||
CONFIG.channel,
|
||||
'png'
|
||||
),
|
||||
rpmPackageIconsDirPath
|
||||
);
|
||||
|
||||
console.log(`Writing rpm package spec file into "${rpmPackageSpecsDirPath}"`);
|
||||
const rpmPackageSpecFilePath = path.join(rpmPackageSpecsDirPath, 'pulsar.spec');
|
||||
const rpmPackageSpecsTemplate = fs.readFileSync(
|
||||
path.join(
|
||||
CONFIG.repositoryRootPath,
|
||||
'resources',
|
||||
'linux',
|
||||
'redhat',
|
||||
'pulsar.spec.in'
|
||||
)
|
||||
);
|
||||
const rpmPackageSpecsContents = template(rpmPackageSpecsTemplate)({
|
||||
appName: appName,
|
||||
appFileName: editorExecutableName,
|
||||
apmFileName: pkgMgrExecutableName,
|
||||
description: appDescription,
|
||||
installDir: '/usr',
|
||||
version: appVersion,
|
||||
policyFileName
|
||||
});
|
||||
fs.writeFileSync(rpmPackageSpecFilePath, rpmPackageSpecsContents);
|
||||
|
||||
console.log(`Writing desktop entry file into "${rpmPackageBuildDirPath}"`);
|
||||
const desktopEntryTemplate = fs.readFileSync(
|
||||
path.join(
|
||||
CONFIG.repositoryRootPath,
|
||||
'resources',
|
||||
'linux',
|
||||
'pulsar.desktop.in'
|
||||
)
|
||||
);
|
||||
const desktopEntryContents = template(desktopEntryTemplate)({
|
||||
appName: appName,
|
||||
appFileName: editorExecutableName,
|
||||
description: appDescription,
|
||||
installDir: '/usr',
|
||||
iconPath: editorExecutableName
|
||||
});
|
||||
fs.writeFileSync(
|
||||
path.join(rpmPackageBuildDirPath, `${editorExecutableName}.desktop`),
|
||||
desktopEntryContents
|
||||
);
|
||||
|
||||
console.log(`Copying pulsar.sh into "${rpmPackageBuildDirPath}"`);
|
||||
fs.copySync(
|
||||
path.join(CONFIG.repositoryRootPath, 'pulsar.sh'),
|
||||
path.join(rpmPackageBuildDirPath, 'pulsar.sh')
|
||||
);
|
||||
|
||||
console.log(`Copying pulsar.policy into "${rpmPackageBuildDirPath}"`);
|
||||
fs.copySync(
|
||||
path.join(CONFIG.repositoryRootPath, 'resources', 'linux', 'pulsar.policy'),
|
||||
path.join(rpmPackageBuildDirPath, policyFileName)
|
||||
);
|
||||
|
||||
console.log(`Generating .rpm package from "${rpmPackageDirPath}"`);
|
||||
spawnSync('rpmbuild', ['-ba', '--clean', rpmPackageSpecFilePath]);
|
||||
for (let generatedArch of fs.readdirSync(rpmPackageRpmsDirPath)) {
|
||||
const generatedArchDirPath = path.join(
|
||||
rpmPackageRpmsDirPath,
|
||||
generatedArch
|
||||
);
|
||||
const generatedPackageFileNames = fs.readdirSync(generatedArchDirPath);
|
||||
assert(
|
||||
generatedPackageFileNames.length === 1,
|
||||
'Generated more than one rpm package'
|
||||
);
|
||||
const generatedPackageFilePath = path.join(
|
||||
generatedArchDirPath,
|
||||
generatedPackageFileNames[0]
|
||||
);
|
||||
const outputRpmPackageFilePath = path.join(
|
||||
CONFIG.buildOutputPath,
|
||||
`pulsar.${generatedArch}.rpm`
|
||||
);
|
||||
console.log(
|
||||
`Copying "${generatedPackageFilePath}" into "${outputRpmPackageFilePath}"`
|
||||
);
|
||||
fs.copySync(generatedPackageFilePath, outputRpmPackageFilePath);
|
||||
}
|
||||
};
|
@ -1,83 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const electronInstaller = require('@atom/electron-winstaller');
|
||||
const fs = require('fs');
|
||||
const glob = require('glob');
|
||||
const path = require('path');
|
||||
|
||||
const CONFIG = require('../config');
|
||||
const { REPO_OWNER, MAIN_REPO } = CONFIG;
|
||||
|
||||
// Builds the Squirrel.Windows installer for a packaged Pulsar app directory
// and returns a Promise resolving to the path of the generated Setup exe.
module.exports = packagedAppPath => {
  // ia32 historically has no arch suffix in the update-feed URL.
  const archSuffix = process.arch === 'ia32' ? '' : '-' + process.arch;
  const updateUrlPrefix =
    process.env.ATOM_UPDATE_URL_PREFIX || 'https://atom.io';
  // Options consumed by @atom/electron-winstaller's createWindowsInstaller.
  const options = {
    name: CONFIG.channelName,
    title: CONFIG.appName,
    exe: CONFIG.executableName,
    appDirectory: packagedAppPath,
    // NOTE(review): authors string inherited from Atom — presumably should be
    // the Pulsar org; confirm before relying on installer metadata.
    authors: 'GitHub Inc.',
    iconUrl: `https://raw.githubusercontent.com/${REPO_OWNER}/${MAIN_REPO}/master/resources/app-icons/${
      CONFIG.channel
    }/pulsar.ico`,
    loadingGif: path.join(
      CONFIG.repositoryRootPath,
      'resources',
      'win',
      'loading.gif'
    ),
    outputDirectory: CONFIG.buildOutputPath,
    noMsi: true,
    // Squirrel delta-update feed; version-scoped so only relevant deltas download.
    remoteReleases: `${updateUrlPrefix}/api/updates${archSuffix}?version=${
      CONFIG.computedAppVersion
    }`,
    setupExe: `PulsarSetup${process.arch === 'x64' ? '-x64' : ''}.exe`,
    setupIcon: path.join(
      CONFIG.repositoryRootPath,
      'resources',
      'app-icons',
      CONFIG.channel,
      'pulsar.ico'
    )
  };

  // Post-build cleanup: renames x64 artifacts with an explicit -x64 suffix and
  // deletes stale nupkgs; returns the final Setup exe path (the Promise value).
  const cleanUp = () => {
    const releasesPath = `${CONFIG.buildOutputPath}/RELEASES`;
    if (process.arch === 'x64' && fs.existsSync(releasesPath)) {
      fs.renameSync(releasesPath, `${releasesPath}-x64`);
    }

    let appName =
      CONFIG.channel === 'stable' ? 'pulsar' : `pulsar-${CONFIG.channel}`;
    for (let nupkgPath of glob.sync(
      `${CONFIG.buildOutputPath}/${appName}-*.nupkg`
    )) {
      if (!nupkgPath.includes(CONFIG.computedAppVersion)) {
        console.log(
          `Deleting downloaded nupkg for previous version at ${nupkgPath} to prevent it from being stored as an artifact`
        );
        fs.unlinkSync(nupkgPath);
      } else {
        if (process.arch === 'x64') {
          // Use the original .nupkg filename to generate the `pulsar-x64` name by inserting `-x64` after `pulsar`
          const newNupkgPath = nupkgPath.replace(
            `${appName}-`,
            `${appName}-x64-`
          );
          fs.renameSync(nupkgPath, newNupkgPath);
        }
      }
    }

    return `${CONFIG.buildOutputPath}/${options.setupExe}`;
  };

  console.log(`Creating Windows Installer for ${packagedAppPath}`);
  // cleanUp runs on both success and failure; on failure the original error is
  // re-propagated after cleanup.
  return electronInstaller
    .createWindowsInstaller(options)
    .then(cleanUp, error => {
      cleanUp();
      return Promise.reject(error);
    });
};
|
@ -1,22 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
module.exports = function() {
|
||||
process.env['PATH'] = process.env['PATH']
|
||||
.split(';')
|
||||
.filter(function(p) {
|
||||
if (fs.existsSync(path.join(p, 'msbuild.exe'))) {
|
||||
console.log(
|
||||
'Excluding "' +
|
||||
p +
|
||||
'" from PATH to avoid msbuild.exe mismatch that causes errors during module installation'
|
||||
);
|
||||
return false;
|
||||
} else {
|
||||
return true;
|
||||
}
|
||||
})
|
||||
.join(';');
|
||||
};
|
@ -1,49 +0,0 @@
|
||||
const crypto = require('crypto');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
const CONFIG = require('../config');
|
||||
const FINGERPRINT_PATH = path.join(
|
||||
CONFIG.repositoryRootPath,
|
||||
'node_modules',
|
||||
'.dependencies-fingerprint'
|
||||
);
|
||||
|
||||
module.exports = {
|
||||
write: function() {
|
||||
const fingerprint = this.compute();
|
||||
fs.writeFileSync(FINGERPRINT_PATH, fingerprint);
|
||||
console.log(
|
||||
'Wrote Dependencies Fingerprint:',
|
||||
FINGERPRINT_PATH,
|
||||
fingerprint
|
||||
);
|
||||
},
|
||||
read: function() {
|
||||
return fs.existsSync(FINGERPRINT_PATH)
|
||||
? fs.readFileSync(FINGERPRINT_PATH, 'utf8')
|
||||
: null;
|
||||
},
|
||||
isOutdated: function() {
|
||||
const fingerprint = this.read();
|
||||
return fingerprint ? fingerprint !== this.compute() : false;
|
||||
},
|
||||
compute: function() {
|
||||
// Include the electron minor version in the fingerprint since that changing requires a re-install
|
||||
const electronVersion = CONFIG.appMetadata.electronVersion.replace(
|
||||
/\.\d+$/,
|
||||
''
|
||||
);
|
||||
const apmVersion = CONFIG.apmMetadata.dependencies['pulsar-package-manager'];
|
||||
const body =
|
||||
electronVersion +
|
||||
apmVersion +
|
||||
process.platform +
|
||||
process.version +
|
||||
process.arch;
|
||||
return crypto
|
||||
.createHash('sha1')
|
||||
.update(body)
|
||||
.digest('hex');
|
||||
}
|
||||
};
|
@ -1,25 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs-extra');
|
||||
const path = require('path');
|
||||
const syncRequest = require('sync-request');
|
||||
|
||||
module.exports = function(downloadURL, destinationPath) {
|
||||
console.log(`Downloading file from GitHub Repository to ${destinationPath}`);
|
||||
const response = syncRequest('GET', downloadURL, {
|
||||
headers: {
|
||||
Accept: 'application/vnd.github.v3.raw',
|
||||
'User-Agent': 'Pulsar Build',
|
||||
Authorization: `token ${process.env.GITHUB_TOKEN}`
|
||||
}
|
||||
});
|
||||
|
||||
if (response.statusCode === 200) {
|
||||
fs.mkdirpSync(path.dirname(destinationPath));
|
||||
fs.writeFileSync(destinationPath, response.body);
|
||||
} else {
|
||||
throw new Error(
|
||||
'Error downloading file. HTTP Status ' + response.statusCode + '.'
|
||||
);
|
||||
}
|
||||
};
|
@ -1,61 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs-extra');
|
||||
const glob = require('glob');
|
||||
const path = require('path');
|
||||
|
||||
const CONFIG = require('../config');
|
||||
module.exports = function() {
|
||||
if (process.platform === 'win32') {
|
||||
console.log(
|
||||
'Skipping symbol dumping because minidump is not supported on Windows'
|
||||
.gray
|
||||
);
|
||||
return Promise.resolve();
|
||||
} else {
|
||||
console.log(`Dumping symbols in ${CONFIG.symbolsPath}`);
|
||||
const binaryPaths = glob.sync(
|
||||
path.join(CONFIG.intermediateAppPath, 'node_modules', '**', '*.node')
|
||||
);
|
||||
return Promise.all(binaryPaths.map(dumpSymbol));
|
||||
}
|
||||
};
|
||||
|
||||
// Dumps breakpad symbols for a single native binary into
// CONFIG.symbolsPath/<module>/<id>/<module>.sym.
// Fixes vs. original:
//  - `exec` returns null when the MODULE line is absent; the old code would
//    throw a TypeError on `moduleLine.length` before its own validity check.
//  - Errors are now signalled via reject() instead of `throw` inside the
//    minidump callback, where a throw cannot reject this Promise and would
//    surface as an uncaught exception instead of failing Promise.all.
//  - The symbol file is named `${filename}.sym`; the previous literal
//    "$(unknown).sym" was not template interpolation ("$(...)" has no meaning
//    in a template literal) and appears to be a mangled `${filename}`.
function dumpSymbol(binaryPath) {
  const minidump = require('minidump');

  return new Promise(function(resolve, reject) {
    minidump.dumpSymbol(binaryPath, function(error, content) {
      if (error) {
        // fswin.node is only used on windows, ignore the error on other platforms
        if (process.platform !== 'win32') {
          console.warn(
            `\n##[warning] Failed to dump the symbols via minidump. ${error}. Ignoring the error...`
          );
          return resolve();
        }
        return reject(new Error(error));
      }

      // Breakpad output starts with "MODULE <os> <arch> <id> <name>".
      const moduleLine = /MODULE [^ ]+ [^ ]+ ([0-9A-F]+) (.*)\n/.exec(content);
      if (!moduleLine || moduleLine.length !== 3) {
        const errorMessage = `Invalid output when dumping symbol for ${binaryPath}`;
        console.error(errorMessage);
        return reject(new Error(errorMessage));
      }

      const filename = moduleLine[2];
      const symbolDirPath = path.join(
        CONFIG.symbolsPath,
        filename,
        moduleLine[1]
      );
      const symbolFilePath = path.join(symbolDirPath, `${filename}.sym`);
      fs.mkdirpSync(symbolDirPath);
      fs.writeFileSync(symbolFilePath, content);
      resolve();
    });
  });
}
|
@ -1,21 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const glob = require('glob');
|
||||
|
||||
module.exports = function(globPaths) {
|
||||
return Promise.all(globPaths.map(g => expandGlobPath(g))).then(paths =>
|
||||
paths.reduce((a, b) => a.concat(b), [])
|
||||
);
|
||||
};
|
||||
|
||||
// Promisified wrapper around the callback-style glob() API: resolves with the
// matched paths, rejects with glob's error.
function expandGlobPath(globPath) {
  return new Promise((resolve, reject) =>
    glob(globPath, (error, matchedPaths) =>
      error ? reject(error) : resolve(matchedPaths)
    )
  );
}
|
@ -1,55 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const donna = require('donna');
|
||||
const tello = require('tello');
|
||||
const joanna = require('joanna');
|
||||
const glob = require('glob');
|
||||
const fs = require('fs-extra');
|
||||
const path = require('path');
|
||||
|
||||
const CONFIG = require('../config');
|
||||
|
||||
// Generates the combined CoffeeScript (donna) + JavaScript (joanna) API
// metadata for the editor sources, digests it with tello, merges in api.json
// files shipped by bundled dependencies, and writes pulsar-api.json.
module.exports = function() {
  const generatedJSONPath = path.join(CONFIG.docsOutputPath, 'pulsar-api.json');
  console.log(`Generating API docs at ${generatedJSONPath}`);

  // Unfortunately, correct relative paths depend on a specific working
  // directory, but this script should be able to run from anywhere, so we
  // muck with the cwd temporarily.
  const oldWorkingDirectoryPath = process.cwd();
  process.chdir(CONFIG.repositoryRootPath);
  const coffeeMetadata = donna.generateMetadata(['.'])[0];
  const jsMetadata = joanna(glob.sync(`src/**/*.js`));
  process.chdir(oldWorkingDirectoryPath);

  // Merge JS file metadata into the Coffee metadata (JS wins on collisions).
  const metadata = {
    repository: coffeeMetadata.repository,
    version: coffeeMetadata.version,
    files: Object.assign(coffeeMetadata.files, jsMetadata.files)
  };

  const api = tello.digest([metadata]);
  Object.assign(api.classes, getAPIDocsForDependencies());
  // Stable, sorted class order keeps the generated JSON diff-friendly.
  api.classes = sortObjectByKey(api.classes);

  fs.mkdirpSync(CONFIG.docsOutputPath);
  fs.writeFileSync(generatedJSONPath, JSON.stringify(api, null, 2));
};
|
||||
|
||||
// Collects the `classes` maps from every dependency that ships an api.json at
// its package root, merged into a single object (later packages win).
function getAPIDocsForDependencies() {
  const apiJSONPaths = glob.sync(
    `${CONFIG.repositoryRootPath}/node_modules/*/api.json`
  );
  return apiJSONPaths.reduce((classes, apiJSONPath) => {
    return Object.assign(classes, require(apiJSONPath).classes);
  }, {});
}
|
||||
|
||||
// Returns a shallow copy of `object` whose own keys are inserted in sorted
// order (so iteration/serialization is deterministic). Values are not copied.
function sortObjectByKey(object) {
  return Object.keys(object)
    .sort()
    .reduce((sorted, key) => {
      sorted[key] = object[key];
      return sorted;
    }, {});
}
|
@ -1,291 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const CSON = require('season');
|
||||
const deprecatedPackagesMetadata = require('../deprecated-packages');
|
||||
const fs = require('fs-plus');
|
||||
const normalizePackageData = require('normalize-package-data');
|
||||
const path = require('path');
|
||||
const semver = require('semver');
|
||||
|
||||
const CONFIG = require('../config');
|
||||
|
||||
let appName = CONFIG.appMetadata.name;
|
||||
if (process.platform === 'win32') {
|
||||
// Use the channel name in the app name on Windows so that the installer will
|
||||
// place it in a different folder in AppData\Local
|
||||
appName = CONFIG.channel === 'stable' ? 'pulsar' : `pulsar-${CONFIG.channel}`;
|
||||
}
|
||||
|
||||
module.exports = function() {
|
||||
console.log(
|
||||
`Generating metadata for ${path.join(
|
||||
CONFIG.intermediateAppPath,
|
||||
'package.json'
|
||||
)}`
|
||||
);
|
||||
CONFIG.appMetadata._atomPackages = buildBundledPackagesMetadata();
|
||||
CONFIG.appMetadata._atomMenu = buildPlatformMenuMetadata();
|
||||
CONFIG.appMetadata._atomKeymaps = buildPlatformKeymapsMetadata();
|
||||
CONFIG.appMetadata._deprecatedPackages = deprecatedPackagesMetadata;
|
||||
CONFIG.appMetadata.version = CONFIG.computedAppVersion;
|
||||
CONFIG.appMetadata.name = appName;
|
||||
CONFIG.appMetadata.productName = CONFIG.appName;
|
||||
checkDeprecatedPackagesMetadata();
|
||||
fs.writeFileSync(
|
||||
path.join(CONFIG.intermediateAppPath, 'package.json'),
|
||||
JSON.stringify(CONFIG.appMetadata)
|
||||
);
|
||||
};
|
||||
|
||||
module.exports = function() {
|
||||
console.log(
|
||||
`Generating metadata for ${path.join(
|
||||
CONFIG.intermediateAppPath,
|
||||
'package.json'
|
||||
)}`
|
||||
);
|
||||
CONFIG.appMetadata._atomPackages = buildBundledPackagesMetadata();
|
||||
CONFIG.appMetadata._atomMenu = buildPlatformMenuMetadata();
|
||||
CONFIG.appMetadata._atomKeymaps = buildPlatformKeymapsMetadata();
|
||||
CONFIG.appMetadata._deprecatedPackages = deprecatedPackagesMetadata;
|
||||
CONFIG.appMetadata.version = CONFIG.computedAppVersion;
|
||||
checkDeprecatedPackagesMetadata();
|
||||
fs.writeFileSync(
|
||||
path.join(CONFIG.intermediateAppPath, 'package.json'),
|
||||
JSON.stringify(CONFIG.appMetadata)
|
||||
);
|
||||
};
|
||||
|
||||
// Builds the `_atomPackages` map written into the app's package.json: for each
// bundled package, its normalized package.json plus pre-read keymaps, menus,
// grammar paths, settings, and style-sheet paths so the editor can load them
// without touching disk at startup. (`fs` here is fs-plus: listSync/resolve.)
function buildBundledPackagesMetadata() {
  const packages = {};
  for (let packageName of Object.keys(CONFIG.appMetadata.packageDependencies)) {
    const packagePath = path.join(
      CONFIG.intermediateAppPath,
      'node_modules',
      packageName
    );
    const packageMetadataPath = path.join(packagePath, 'package.json');
    const packageMetadata = JSON.parse(
      fs.readFileSync(packageMetadataPath, 'utf8')
    );
    // Normalize in place; "No README data" warnings are expected and muted.
    normalizePackageData(
      packageMetadata,
      msg => {
        if (!msg.match(/No README data$/)) {
          console.warn(
            `Invalid package metadata. ${packageMetadata.name}: ${msg}`
          );
        }
      },
      true
    );
    // Strip npm's "git+" scheme prefix from git repository URLs.
    if (
      packageMetadata.repository &&
      packageMetadata.repository.url &&
      packageMetadata.repository.type === 'git'
    ) {
      packageMetadata.repository.url = packageMetadata.repository.url.replace(
        /^git\+/,
        ''
      );
    }

    // Drop npm registry bookkeeping fields that bloat the bundled metadata.
    delete packageMetadata['_from'];
    delete packageMetadata['_id'];
    delete packageMetadata['dist'];
    delete packageMetadata['readme'];
    delete packageMetadata['readmeFilename'];

    const packageModuleCache = packageMetadata._atomModuleCache || {};
    // package.json must not be served from the module cache — remove it.
    if (
      packageModuleCache.extensions &&
      packageModuleCache.extensions['.json']
    ) {
      const index = packageModuleCache.extensions['.json'].indexOf(
        'package.json'
      );
      if (index !== -1) {
        packageModuleCache.extensions['.json'].splice(index, 1);
      }
    }

    const packageNewMetadata = {
      metadata: packageMetadata,
      keymaps: {},
      menus: {},
      grammarPaths: [],
      settings: {}
    };

    packageNewMetadata.rootDirPath = path.relative(
      CONFIG.intermediateAppPath,
      packagePath
    );

    if (packageMetadata.main) {
      const mainPath = require.resolve(
        path.resolve(packagePath, packageMetadata.main)
      );
      packageNewMetadata.main = path.relative(
        path.join(CONFIG.intermediateAppPath, 'static'),
        mainPath
      );
      // Convert backward slashes to forward slashes in order to allow package
      // main modules to be required from the snapshot. This is because we use
      // forward slashes to cache the sources in the snapshot, so we need to use
      // them here as well.
      packageNewMetadata.main = packageNewMetadata.main.replace(/\\/g, '/');
    }

    // Pre-parse keymaps (.cson/.json) keyed by app-relative path.
    const packageKeymapsPath = path.join(packagePath, 'keymaps');
    if (fs.existsSync(packageKeymapsPath)) {
      for (let packageKeymapName of fs.readdirSync(packageKeymapsPath)) {
        const packageKeymapPath = path.join(
          packageKeymapsPath,
          packageKeymapName
        );
        if (
          packageKeymapPath.endsWith('.cson') ||
          packageKeymapPath.endsWith('.json')
        ) {
          const relativePath = path.relative(
            CONFIG.intermediateAppPath,
            packageKeymapPath
          );
          packageNewMetadata.keymaps[relativePath] = CSON.readFileSync(
            packageKeymapPath
          );
        }
      }
    }

    // Pre-parse menus the same way.
    const packageMenusPath = path.join(packagePath, 'menus');
    if (fs.existsSync(packageMenusPath)) {
      for (let packageMenuName of fs.readdirSync(packageMenusPath)) {
        const packageMenuPath = path.join(packageMenusPath, packageMenuName);
        if (
          packageMenuPath.endsWith('.cson') ||
          packageMenuPath.endsWith('.json')
        ) {
          const relativePath = path.relative(
            CONFIG.intermediateAppPath,
            packageMenuPath
          );
          packageNewMetadata.menus[relativePath] = CSON.readFileSync(
            packageMenuPath
          );
        }
      }
    }

    // Grammars are recorded by path only (loaded lazily by the editor).
    const packageGrammarsPath = path.join(packagePath, 'grammars');
    for (let packageGrammarPath of fs.listSync(packageGrammarsPath, [
      'json',
      'cson'
    ])) {
      const relativePath = path.relative(
        CONFIG.intermediateAppPath,
        packageGrammarPath
      );
      packageNewMetadata.grammarPaths.push(relativePath);
    }

    // Settings files are pre-parsed like keymaps/menus.
    const packageSettingsPath = path.join(packagePath, 'settings');
    for (let packageSettingPath of fs.listSync(packageSettingsPath, [
      'json',
      'cson'
    ])) {
      const relativePath = path.relative(
        CONFIG.intermediateAppPath,
        packageSettingPath
      );
      packageNewMetadata.settings[relativePath] = CSON.readFileSync(
        packageSettingPath
      );
    }

    // Style-sheet resolution precedence: explicit mainStyleSheet, then the
    // styleSheets list, then styles/index.(css|less), then everything in styles/.
    const packageStyleSheetsPath = path.join(packagePath, 'styles');
    let styleSheets = null;
    if (packageMetadata.mainStyleSheet) {
      styleSheets = [fs.resolve(packagePath, packageMetadata.mainStyleSheet)];
    } else if (packageMetadata.styleSheets) {
      styleSheets = packageMetadata.styleSheets.map(name =>
        fs.resolve(packageStyleSheetsPath, name, ['css', 'less', ''])
      );
    } else {
      const indexStylesheet = fs.resolve(packagePath, 'index', ['css', 'less']);
      if (indexStylesheet) {
        styleSheets = [indexStylesheet];
      } else {
        styleSheets = fs.listSync(packageStyleSheetsPath, ['css', 'less']);
      }
    }

    packageNewMetadata.styleSheetPaths = styleSheets.map(styleSheetPath =>
      path.relative(packagePath, styleSheetPath)
    );

    packages[packageMetadata.name] = packageNewMetadata;
    // Remove now-empty extension lists from the module cache.
    if (packageModuleCache.extensions) {
      for (let extension of Object.keys(packageModuleCache.extensions)) {
        const paths = packageModuleCache.extensions[extension];
        if (paths.length === 0) {
          delete packageModuleCache.extensions[extension];
        }
      }
    }
  }
  return packages;
}
|
||||
|
||||
// Loads menus/<platform>.cson from the repository, or null when the current
// platform has no menu definition.
function buildPlatformMenuMetadata() {
  const menuPath = path.join(
    CONFIG.repositoryRootPath,
    'menus',
    `${process.platform}.cson`
  );
  return fs.existsSync(menuPath) ? CSON.readFileSync(menuPath) : null;
}
|
||||
|
||||
// Reads every .cson/.json keymap from the repository's keymaps/ directory and
// returns them keyed by file basename, skipping keymaps whose basename names a
// platform other than the current one.
function buildPlatformKeymapsMetadata() {
  const otherPlatforms = [
    'darwin',
    'freebsd',
    'linux',
    'sunos',
    'win32'
  ].filter(platform => platform !== process.platform);
  const keymapsPath = path.join(CONFIG.repositoryRootPath, 'keymaps');
  const keymaps = {};
  for (const keymapName of fs.readdirSync(keymapsPath)) {
    const keymapPath = path.join(keymapsPath, keymapName);
    const isKeymapFile =
      keymapPath.endsWith('.cson') || keymapPath.endsWith('.json');
    if (!isKeymapFile) {
      continue;
    }
    // Basename without extension identifies a platform-specific keymap.
    const keymapPlatform = path.basename(keymapPath, path.extname(keymapPath));
    if (otherPlatforms.indexOf(keymapPlatform) === -1) {
      keymaps[path.basename(keymapPath)] = CSON.readFileSync(keymapPath);
    }
  }
  return keymaps;
}
|
||||
|
||||
// Validates that every `version` in the deprecated-packages table is a legal
// semver range; throws on the first invalid entry.
function checkDeprecatedPackagesMetadata() {
  for (const packageName of Object.keys(deprecatedPackagesMetadata)) {
    const packageMetadata = deprecatedPackagesMetadata[packageName];
    if (!packageMetadata.version) {
      continue;
    }
    if (!semver.validRange(packageMetadata.version)) {
      throw new Error(
        `Invalid range: ${packageMetadata.version} (${packageName}).`
      );
    }
  }
}
|
@ -1,38 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const ModuleCache = require('../../src/module-cache');
|
||||
|
||||
const CONFIG = require('../config');
|
||||
|
||||
module.exports = function() {
|
||||
console.log(`Generating module cache for ${CONFIG.intermediateAppPath}`);
|
||||
for (let packageName of Object.keys(CONFIG.appMetadata.packageDependencies)) {
|
||||
ModuleCache.create(
|
||||
path.join(CONFIG.intermediateAppPath, 'node_modules', packageName)
|
||||
);
|
||||
}
|
||||
ModuleCache.create(CONFIG.intermediateAppPath);
|
||||
const newMetadata = JSON.parse(
|
||||
fs.readFileSync(path.join(CONFIG.intermediateAppPath, 'package.json'))
|
||||
);
|
||||
for (let folder of newMetadata._atomModuleCache.folders) {
|
||||
if (folder.paths.indexOf('') !== -1) {
|
||||
folder.paths = [
|
||||
'',
|
||||
'exports',
|
||||
'spec',
|
||||
'src',
|
||||
'src/main-process',
|
||||
'static',
|
||||
'vendor'
|
||||
];
|
||||
}
|
||||
}
|
||||
CONFIG.appMetadata = newMetadata;
|
||||
fs.writeFileSync(
|
||||
path.join(CONFIG.intermediateAppPath, 'package.json'),
|
||||
JSON.stringify(CONFIG.appMetadata)
|
||||
);
|
||||
};
|
@ -1,352 +0,0 @@
|
||||
const childProcess = require('child_process');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const electronLink = require('electron-link');
|
||||
const terser = require('terser');
|
||||
const CONFIG = require('../config');
|
||||
|
||||
// Builds the V8 startup snapshot for the packaged app: electron-link rewrites
// the startup sources into one script (excluding modules that cannot live in a
// snapshot), the script is minified, verified under Electron's Node, compiled
// with mksnapshot, and the resulting .bin blobs are moved into the app bundle.
module.exports = function(packagedAppPath) {
  const snapshotScriptPath = path.join(CONFIG.buildOutputPath, 'startup.js');
  // Module specifiers that must always be required at runtime, never snapshotted.
  const coreModules = new Set([
    'electron',
    'atom',
    'shell',
    'WNdb',
    'lapack',
    'remote'
  ]);
  const baseDirPath = path.join(CONFIG.intermediateAppPath, 'static');
  let processedFiles = 0;

  return electronLink({
    baseDirPath,
    mainPath: path.resolve(
      baseDirPath,
      '..',
      'src',
      'initialize-application-window.js'
    ),
    cachePath: path.join(CONFIG.atomHomeDirPath, 'snapshot-cache'),
    auxiliaryData: CONFIG.snapshotAuxiliaryData,
    // Returns true for modules that must stay as runtime require() calls
    // (native addons, Electron builtins, and modules known to break inside the
    // snapshot). The list is order-insensitive but each entry is load-bearing.
    shouldExcludeModule: ({ requiringModulePath, requiredModulePath }) => {
      // Single-line progress indicator: rewind with \r after the first file.
      if (processedFiles > 0) {
        process.stdout.write('\r');
      }
      process.stdout.write(
        `Generating snapshot script at "${snapshotScriptPath}" (${++processedFiles})`
      );

      const requiringModuleRelativePath = path.relative(
        baseDirPath,
        requiringModulePath
      );
      const requiredModuleRelativePath = path.relative(
        baseDirPath,
        requiredModulePath
      );
      return (
        requiredModulePath.endsWith('.node') ||
        coreModules.has(requiredModulePath) ||
        requiringModuleRelativePath.endsWith(
          path.join('node_modules/xregexp/xregexp-all.js')
        ) ||
        (requiredModuleRelativePath.startsWith(path.join('..', 'src')) &&
          requiredModuleRelativePath.endsWith('-element.js')) ||
        requiredModuleRelativePath.startsWith(
          path.join('..', 'node_modules', 'dugite')
        ) ||
        requiredModuleRelativePath.startsWith(
          path.join(
            '..',
            'node_modules',
            'markdown-preview',
            'node_modules',
            'yaml-front-matter'
          )
        ) ||
        requiredModuleRelativePath.startsWith(
          path.join(
            '..',
            'node_modules',
            'markdown-preview',
            'node_modules',
            'cheerio'
          )
        ) ||
        requiredModuleRelativePath.startsWith(
          path.join(
            '..',
            'node_modules',
            'markdown-preview',
            'node_modules',
            'marked'
          )
        ) ||
        requiredModuleRelativePath.startsWith(
          path.join('..', 'node_modules', 'typescript-simple')
        ) ||
        requiredModuleRelativePath.endsWith(
          path.join(
            'node_modules',
            'coffee-script',
            'lib',
            'coffee-script',
            'register.js'
          )
        ) ||
        requiredModuleRelativePath.endsWith(
          path.join('node_modules', 'fs-extra', 'lib', 'index.js')
        ) ||
        requiredModuleRelativePath.endsWith(
          path.join('node_modules', 'graceful-fs', 'graceful-fs.js')
        ) ||
        requiredModuleRelativePath.endsWith(
          path.join('node_modules', 'htmlparser2', 'lib', 'index.js')
        ) ||
        requiredModuleRelativePath.endsWith(
          path.join('node_modules', 'minimatch', 'minimatch.js')
        ) ||
        requiredModuleRelativePath.endsWith(
          path.join('node_modules', 'request', 'index.js')
        ) ||
        requiredModuleRelativePath.endsWith(
          path.join('node_modules', 'request', 'request.js')
        ) ||
        requiredModuleRelativePath.endsWith(
          path.join('node_modules', 'superstring', 'index.js')
        ) ||
        requiredModuleRelativePath.endsWith(
          path.join('node_modules', 'temp', 'lib', 'temp.js')
        ) ||
        requiredModuleRelativePath.endsWith(
          path.join('node_modules', 'parse5', 'lib', 'index.js')
        ) ||
        requiredModuleRelativePath === path.join('..', 'exports', 'atom.js') ||
        requiredModuleRelativePath ===
          path.join('..', 'src', 'electron-shims.js') ||
        requiredModuleRelativePath ===
          path.join(
            '..',
            'node_modules',
            'atom-keymap',
            'lib',
            'command-event.js'
          ) ||
        (requiredModuleRelativePath.includes('@babel') &&
          // GitHub package uses this in its relay dependency which is required on startup
          !requiredModuleRelativePath.includes(
            path.join('@babel', 'runtime')
          )) ||
        requiredModuleRelativePath.includes('babel-plugin') ||
        requiredModuleRelativePath.includes('babel-preset') ||
        requiredModuleRelativePath.includes('supports-color') ||
        requiredModuleRelativePath ===
          path.join('..', 'node_modules', 'debug', 'node.js') ||
        requiredModuleRelativePath ===
          path.join('..', 'node_modules', 'git-utils', 'src', 'git.js') ||
        requiredModuleRelativePath ===
          path.join('..', 'node_modules', 'glob', 'glob.js') ||
        requiredModuleRelativePath ===
          path.join('..', 'node_modules', 'iconv-lite', 'lib', 'index.js') ||
        requiredModuleRelativePath ===
          path.join('..', 'node_modules', 'less', 'index.js') ||
        requiredModuleRelativePath ===
          path.join('..', 'node_modules', 'less', 'lib', 'less', 'fs.js') ||
        requiredModuleRelativePath ===
          path.join(
            '..',
            'node_modules',
            'less',
            'lib',
            'less-node',
            'index.js'
          ) ||
        requiredModuleRelativePath ===
          path.join('..', 'node_modules', 'lodash.isequal', 'index.js') ||
        requiredModuleRelativePath ===
          path.join(
            '..',
            'node_modules',
            'node-fetch',
            'lib',
            'fetch-error.js'
          ) ||
        requiredModuleRelativePath ===
          path.join('..', 'node_modules', 'oniguruma', 'src', 'oniguruma.js') ||
        requiredModuleRelativePath ===
          path.join('..', 'node_modules', 'resolve', 'index.js') ||
        requiredModuleRelativePath ===
          path.join('..', 'node_modules', 'resolve', 'lib', 'core.js') ||
        requiredModuleRelativePath ===
          path.join(
            '..',
            'node_modules',
            'settings-view',
            'node_modules',
            'glob',
            'glob.js'
          ) ||
        requiredModuleRelativePath ===
          path.join(
            '..',
            'node_modules',
            'spell-check',
            'lib',
            'locale-checker.js'
          ) ||
        requiredModuleRelativePath ===
          path.join(
            '..',
            'node_modules',
            'spell-check',
            'lib',
            'system-checker.js'
          ) ||
        requiredModuleRelativePath ===
          path.join(
            '..',
            'node_modules',
            'spellchecker',
            'lib',
            'spellchecker.js'
          ) ||
        requiredModuleRelativePath ===
          path.join(
            '..',
            'node_modules',
            'spelling-manager',
            'node_modules',
            'natural',
            'lib',
            'natural',
            'index.js'
          ) ||
        requiredModuleRelativePath ===
          path.join('..', 'node_modules', 'tar', 'tar.js') ||
        requiredModuleRelativePath ===
          path.join(
            '..',
            'node_modules',
            'ls-archive',
            'node_modules',
            'tar',
            'tar.js'
          ) ||
        requiredModuleRelativePath ===
          path.join('..', 'node_modules', 'tmp', 'lib', 'tmp.js') ||
        requiredModuleRelativePath ===
          path.join('..', 'node_modules', 'tree-sitter', 'index.js') ||
        requiredModuleRelativePath ===
          path.join('..', 'node_modules', 'yauzl', 'index.js') ||
        requiredModuleRelativePath ===
          path.join('..', 'node_modules', 'util-deprecate', 'node.js') ||
        requiredModuleRelativePath ===
          path.join('..', 'node_modules', 'winreg', 'lib', 'registry.js') ||
        requiredModuleRelativePath ===
          path.join('..', 'node_modules', 'scandal', 'lib', 'scandal.js') ||
        requiredModuleRelativePath ===
          path.join(
            '..',
            'node_modules',
            '@atom',
            'fuzzy-native',
            'lib',
            'main.js'
          ) ||
        requiredModuleRelativePath ===
          path.join(
            '..',
            'node_modules',
            'vscode-ripgrep',
            'lib',
            'index.js'
          ) ||
        requiredModuleRelativePath ===
          path.join('..', 'node_modules', 'nsfw', 'js', 'src', 'index.js') ||
        // The startup-time script is used by both the renderer and the main process and having it in the
        // snapshot causes issues.
        requiredModuleRelativePath === path.join('..', 'src', 'startup-time.js')
      );
    }
  }).then(({ snapshotScript }) => {
    process.stdout.write('\n');

    process.stdout.write('Minifying startup script');
    // NOTE(review): this uses terser's synchronous result shape
    // (minification.error / minification.code); terser >= 5 returns a Promise
    // from minify() — confirm the pinned terser major before upgrading.
    const minification = terser.minify(snapshotScript, {
      keep_fnames: true,
      keep_classnames: true,
      compress: { keep_fargs: true, keep_infinity: true }
    });
    if (minification.error) throw minification.error;
    process.stdout.write('\n');
    fs.writeFileSync(snapshotScriptPath, minification.code);

    console.log('Verifying if snapshot can be executed via `mksnapshot`');
    const verifySnapshotScriptPath = path.join(
      CONFIG.repositoryRootPath,
      'script',
      'verify-snapshot-script.js'
    );
    // Run the verification script with the Node bundled inside the packaged
    // Electron binary (ELECTRON_RUN_AS_NODE), so it matches the runtime V8.
    let nodeBundledInElectronPath;
    if (process.platform === 'darwin') {
      nodeBundledInElectronPath = path.join(
        packagedAppPath,
        'Contents',
        'MacOS',
        CONFIG.executableName
      );
    } else {
      nodeBundledInElectronPath = path.join(
        packagedAppPath,
        CONFIG.executableName
      );
    }
    childProcess.execFileSync(
      nodeBundledInElectronPath,
      [verifySnapshotScriptPath, snapshotScriptPath],
      { env: Object.assign({}, process.env, { ELECTRON_RUN_AS_NODE: 1 }) }
    );

    console.log('Generating startup blob with mksnapshot');
    childProcess.spawnSync(process.execPath, [
      path.join(
        CONFIG.repositoryRootPath,
        'script',
        'node_modules',
        'electron-mksnapshot',
        'mksnapshot.js'
      ),
      snapshotScriptPath,
      '--output_dir',
      CONFIG.buildOutputPath
    ]);

    // The generated blobs replace the ones shipped inside the app bundle.
    let startupBlobDestinationPath;
    if (process.platform === 'darwin') {
      startupBlobDestinationPath = `${packagedAppPath}/Contents/Frameworks/Electron Framework.framework/Resources`;
    } else {
      startupBlobDestinationPath = packagedAppPath;
    }

    const snapshotBinaries = ['v8_context_snapshot.bin', 'snapshot_blob.bin'];
    for (let snapshotBinary of snapshotBinaries) {
      const destinationPath = path.join(
        startupBlobDestinationPath,
        snapshotBinary
      );
      console.log(`Moving generated startup blob into "${destinationPath}"`);
      try {
        fs.unlinkSync(destinationPath);
      } catch (err) {
        // Doesn't matter if the file doesn't exist already
        if (!err.code || err.code !== 'ENOENT') {
          throw err;
        }
      }
      fs.renameSync(
        path.join(CONFIG.buildOutputPath, snapshotBinary),
        destinationPath
      );
    }
  });
};
|
@ -1,46 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const legalEagle = require('legal-eagle');
|
||||
|
||||
const licenseOverrides = require('../license-overrides');
|
||||
const CONFIG = require('../config');
|
||||
|
||||
module.exports = function() {
|
||||
return new Promise((resolve, reject) => {
|
||||
legalEagle(
|
||||
{ path: CONFIG.repositoryRootPath, overrides: licenseOverrides },
|
||||
(err, packagesLicenses) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
throw new Error(err);
|
||||
} else {
|
||||
let text =
|
||||
fs.readFileSync(
|
||||
path.join(CONFIG.repositoryRootPath, 'LICENSE.md'),
|
||||
'utf8'
|
||||
) +
|
||||
'\n\n' +
|
||||
'This application bundles the following third-party packages in accordance\n' +
|
||||
'with the following licenses:\n\n';
|
||||
for (let packageName of Object.keys(packagesLicenses).sort()) {
|
||||
const packageLicense = packagesLicenses[packageName];
|
||||
text +=
|
||||
'-------------------------------------------------------------------------\n\n';
|
||||
text += `Package: ${packageName}\n`;
|
||||
text += `License: ${packageLicense.license}\n`;
|
||||
if (packageLicense.source) {
|
||||
text += `License Source: ${packageLicense.source}\n`;
|
||||
}
|
||||
if (packageLicense.sourceText) {
|
||||
text += `Source Text:\n\n${packageLicense.sourceText}`;
|
||||
}
|
||||
text += '\n';
|
||||
}
|
||||
resolve(text);
|
||||
}
|
||||
}
|
||||
);
|
||||
});
|
||||
};
|
@ -1,29 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const os = require('os');
|
||||
const passwdUser = require('passwd-user');
|
||||
const path = require('path');
|
||||
|
||||
module.exports = function(aPath) {
|
||||
if (!aPath.startsWith('~')) {
|
||||
return aPath;
|
||||
}
|
||||
|
||||
const sepIndex = aPath.indexOf(path.sep);
|
||||
const user = sepIndex < 0 ? aPath.substring(1) : aPath.substring(1, sepIndex);
|
||||
const rest = sepIndex < 0 ? '' : aPath.substring(sepIndex);
|
||||
const home =
|
||||
user === ''
|
||||
? os.homedir()
|
||||
: (() => {
|
||||
const passwd = passwdUser.sync(user);
|
||||
if (passwd === undefined) {
|
||||
throw new Error(
|
||||
`Failed to expand the tilde in ${aPath} - user "${user}" does not exist`
|
||||
);
|
||||
}
|
||||
return passwd.homedir;
|
||||
})();
|
||||
|
||||
return `${home}${rest}`;
|
||||
};
|
@ -1,159 +0,0 @@
|
||||
'use strict';

const path = require('path');
const CONFIG = require('../config');

// Predicate used when copying files into the packaged application:
// returns true when `filePath` should be shipped. A path is shipped unless
// it matches one of the exclusion patterns below — except for the few
// explicitly re-included paths, which always ship.
// NOTE: EXCLUDED_PATHS_REGEXP / INCLUDED_PATHS_REGEXP are defined further
// down in this module; that is safe because this function is only invoked
// after the module has finished loading.
module.exports = function(filePath) {
  return (
    !EXCLUDED_PATHS_REGEXP.test(filePath) ||
    INCLUDED_PATHS_REGEXP.test(filePath)
  );
};

// Regex source strings (joined into one big alternation below) describing
// files and directories that must NOT be bundled: editor/lint dotfiles,
// native-build intermediates, docs, tests, and other dev-only artifacts.
const EXCLUDE_REGEXPS_SOURCES = [
  escapeRegExp('.DS_Store'),
  escapeRegExp('.jshintrc'),
  escapeRegExp('.npmignore'),
  escapeRegExp('.pairs'),
  escapeRegExp('.idea'),
  escapeRegExp('.editorconfig'),
  escapeRegExp('.lint'),
  escapeRegExp('.lintignore'),
  escapeRegExp('.eslintrc'),
  escapeRegExp('.jshintignore'),
  escapeRegExp('coffeelint.json'),
  escapeRegExp('.coffeelintignore'),
  escapeRegExp('.gitattributes'),
  escapeRegExp('.gitkeep'),
  escapeRegExp(path.join('git-utils', 'deps')),
  escapeRegExp(path.join('oniguruma', 'deps')),
  escapeRegExp(path.join('less', 'dist')),
  escapeRegExp(path.join('npm', 'doc')),
  escapeRegExp(path.join('npm', 'html')),
  escapeRegExp(path.join('npm', 'man')),
  escapeRegExp(path.join('npm', 'node_modules', '.bin', 'beep')),
  escapeRegExp(path.join('npm', 'node_modules', '.bin', 'clear')),
  escapeRegExp(path.join('npm', 'node_modules', '.bin', 'starwars')),
  escapeRegExp(path.join('pegjs', 'examples')),
  escapeRegExp(path.join('get-parameter-names', 'node_modules', 'testla')),
  escapeRegExp(
    path.join('get-parameter-names', 'node_modules', '.bin', 'testla')
  ),
  escapeRegExp(path.join('jasmine-reporters', 'ext')),
  // "\\b" keeps e.g. "nan-extra" from matching the bare "nan" directory.
  escapeRegExp(path.join('node_modules', 'nan')) + '\\b',
  escapeRegExp(path.join('node_modules', 'native-mate')),
  escapeRegExp(path.join('build', 'binding.Makefile')),
  escapeRegExp(path.join('build', 'config.gypi')),
  escapeRegExp(path.join('build', 'gyp-mac-tool')),
  escapeRegExp(path.join('build', 'Makefile')),
  escapeRegExp(path.join('build', 'Release', 'obj.target')),
  escapeRegExp(path.join('build', 'Release', 'obj')),
  escapeRegExp(path.join('build', 'Release', '.deps')),
  escapeRegExp(path.join('deps', 'libgit2')),
  escapeRegExp(path.join('vendor', 'apm')),

  // These are only required in dev-mode, when pegjs grammars aren't precompiled
  escapeRegExp(path.join('node_modules', 'loophole')),
  escapeRegExp(path.join('node_modules', 'pegjs')),
  escapeRegExp(path.join('node_modules', '.bin', 'pegjs')),
  escapeRegExp(
    path.join('node_modules', 'spellchecker', 'vendor', 'hunspell') + path.sep
  ) + '.*',

  // node_modules of the fuzzy-native package are only required for building it.
  escapeRegExp(path.join('node_modules', 'fuzzy-native', 'node_modules')),

  // Ignore *.cc and *.h files from native modules
  escapeRegExp(path.sep) + '.+\\.(cc|h)$',

  // Ignore build files
  escapeRegExp(path.sep) + 'binding\\.gyp$',
  escapeRegExp(path.sep) + '.+\\.target.mk$',
  escapeRegExp(path.sep) + 'linker\\.lock$',
  escapeRegExp(path.join('build', 'Release') + path.sep) + '.+\\.node\\.dSYM',
  escapeRegExp(path.join('build', 'Release') + path.sep) +
    '.*\\.(pdb|lib|exp|map|ipdb|iobj)',

  // Ignore node_module files we won't need at runtime
  'node_modules' +
    escapeRegExp(path.sep) +
    '.*' +
    escapeRegExp(path.sep) +
    '_*te?sts?_*' +
    escapeRegExp(path.sep),

  'node_modules' +
    escapeRegExp(path.sep) +
    '.*' +
    escapeRegExp(path.sep) +
    'tests?' +
    escapeRegExp(path.sep),

  'node_modules' +
    escapeRegExp(path.sep) +
    '.*' +
    escapeRegExp(path.sep) +
    'examples?' +
    escapeRegExp(path.sep),
  'node_modules' + escapeRegExp(path.sep) + '.*' + '\\.d\\.ts$',
  'node_modules' + escapeRegExp(path.sep) + '.*' + '\\.js\\.map$',
  '.*' + escapeRegExp(path.sep) + 'test.*\\.html$',

  // specific spec folders hand-picked
  'node_modules' +
    escapeRegExp(path.sep) +
    '(oniguruma|dev-live-reload|deprecation-cop|one-dark-ui|incompatible-packages|git-diff|line-ending-selector|link|grammar-selector|json-schema-traverse|exception-reporting|one-light-ui|autoflow|about|go-to-line|sylvester|apparatus)' +
    escapeRegExp(path.sep) +
    'spec' +
    escapeRegExp(path.sep),

  // babel-core spec
  'node_modules' +
    escapeRegExp(path.sep) +
    'babel-core' +
    escapeRegExp(path.sep) +
    'lib' +
    escapeRegExp(path.sep) +
    'transformation' +
    escapeRegExp(path.sep) +
    'transforers' +
    escapeRegExp(path.sep) +
    'spec' +
    escapeRegExp(path.sep)
];

// Ignore spec directories in all bundled packages
for (let packageName in CONFIG.appMetadata.packageDependencies) {
  EXCLUDE_REGEXPS_SOURCES.push(
    '^' +
      escapeRegExp(
        path.join(
          CONFIG.repositoryRootPath,
          'node_modules',
          packageName,
          'spec'
        )
      )
  );
}

// Ignore Hunspell dictionaries only on macOS.
if (process.platform === 'darwin') {
  EXCLUDE_REGEXPS_SOURCES.push(
    escapeRegExp(path.join('spellchecker', 'vendor', 'hunspell_dictionaries'))
  );
}

// One alternation of every exclusion pattern; each source is wrapped in a
// group so operator precedence inside individual patterns is preserved.
const EXCLUDED_PATHS_REGEXP = new RegExp(
  EXCLUDE_REGEXPS_SOURCES.map(path => `(${path})`).join('|')
);

// Paths that must ship even though an exclusion pattern matches them
// (win_delay_load_hook.cc is needed to rebuild native modules on Windows).
const INCLUDED_PATHS_REGEXP = new RegExp(
  escapeRegExp(
    path.join('node_modules', 'node-gyp', 'src', 'win_delay_load_hook.cc')
  )
);
|
||||
|
||||
// Backslash-escapes every regular-expression metacharacter in `string`
// so the result can be embedded verbatim in a RegExp source.
function escapeRegExp(string) {
  const metachars = /[.?*+^$[\]\\(){}|-]/g;
  return string.replace(metachars, match => `\\${match}`);
}
|
@ -1,28 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const childProcess = require('child_process');
|
||||
|
||||
const CONFIG = require('../config');
|
||||
|
||||
function installApm(ci = false, showVersion = true) {
|
||||
if (ci) {
|
||||
// Tell apm not to dedupe its own dependencies during its
|
||||
// postinstall script. (Deduping during `npm ci` runs is broken.)
|
||||
process.env.NO_APM_DEDUPE = 'true';
|
||||
}
|
||||
console.log('Installing apm');
|
||||
childProcess.execFileSync(
|
||||
CONFIG.getLocalNpmBinPath(),
|
||||
['--global-style', '--loglevel=error', ci ? 'ci' : 'install'],
|
||||
{ env: process.env, cwd: CONFIG.apmRootPath }
|
||||
);
|
||||
if (showVersion) {
|
||||
childProcess.execFileSync(CONFIG.getApmBinPath(), ['--version'], {
|
||||
stdio: 'inherit'
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const { expose } = require(`${CONFIG.scriptRunnerModulesPath}/threads/worker`);
|
||||
expose(installApm);
|
||||
module.exports = installApm;
|
@ -1,234 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs-extra');
|
||||
const handleTilde = require('./handle-tilde');
|
||||
const path = require('path');
|
||||
const template = require('lodash.template');
|
||||
const startCase = require('lodash.startcase');
|
||||
const execSync = require('child_process').execSync;
|
||||
|
||||
const CONFIG = require('../config');
|
||||
|
||||
// Copies the packaged app into `installationDirPath`, first removing any
// previous installation found at that location.
function install(installationDirPath, packagedAppFileName, packagedAppPath) {
  const previouslyInstalled = fs.existsSync(installationDirPath);
  if (previouslyInstalled) {
    console.log(
      `Removing previously installed "${packagedAppFileName}" at "${installationDirPath}"`
    );
    fs.removeSync(installationDirPath);
  }

  console.log(
    `Installing "${packagedAppFileName}" at "${installationDirPath}"`
  );
  fs.copySync(packagedAppPath, installationDirPath);
}
|
||||
|
||||
/**
 * Finds the path to the base directory of the icon default icon theme
 * This follows the freedesktop Icon Theme Specification:
 * https://standards.freedesktop.org/icon-theme-spec/icon-theme-spec-latest.html#install_icons
 * and the XDG Base Directory Specification:
 * https://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html#variables
 */
function findBaseIconThemeDirPath() {
  const defaultBaseIconThemeDir = '/usr/share/icons/hicolor';
  const dataDirsString = process.env.XDG_DATA_DIRS;
  if (!dataDirsString) {
    // No XDG override configured — use the spec's default location.
    return defaultBaseIconThemeDir;
  }

  const dataDirs = dataDirsString.split(path.delimiter);
  const containsUsrShare =
    dataDirs.includes('/usr/share/') || dataDirs.includes('/usr/share');
  return containsUsrShare
    ? defaultBaseIconThemeDir
    : path.join(dataDirs[0], 'icons', 'hicolor');
}
|
||||
|
||||
// Installs the packaged application for the current platform:
//   macOS   -> copies the .app bundle into /Applications (or `installDir`)
//   Windows -> copies into %LOCALAPPDATA%\<app>\app-dev (or `installDir`),
//              retrying with administrator elevation when the copy fails
//   Linux   -> copies into <prefix>/share, installs icons, a .desktop entry,
//              and bin/ launchers (prefix defaults to /usr/local)
// `installDir` of '' selects the platform default. Returns a Promise that
// resolves once installation has finished.
module.exports = function(packagedAppPath, installDir) {
  const packagedAppFileName = path.basename(packagedAppPath);
  if (process.platform === 'darwin') {
    const installPrefix =
      installDir !== ''
        ? handleTilde(installDir)
        : path.join(path.sep, 'Applications');
    const installationDirPath = path.join(installPrefix, packagedAppFileName);
    install(installationDirPath, packagedAppFileName, packagedAppPath);
  } else if (process.platform === 'win32') {
    const installPrefix =
      installDir !== '' ? installDir : process.env.LOCALAPPDATA;
    const installationDirPath = path.join(
      installPrefix,
      packagedAppFileName,
      'app-dev'
    );
    try {
      install(installationDirPath, packagedAppFileName, packagedAppPath);
    } catch (e) {
      // The plain copy failed (presumably a permissions error — TODO confirm);
      // retry via fs-admin, which performs the copy with elevation.
      console.log(
        `Administrator elevation required to install into "${installationDirPath}"`
      );
      const fsAdmin = require('fs-admin');
      return new Promise((resolve, reject) => {
        fsAdmin.recursiveCopy(packagedAppPath, installationDirPath, error => {
          error ? reject(error) : resolve();
        });
      });
    }
  } else {
    // Linux: executables/names carry a channel suffix except on stable.
    const editorExecutableName =
      CONFIG.channel === 'stable' ? 'pulsar' : 'pulsar-' + CONFIG.channel;
    const pkgMgrExecutableName =
      CONFIG.channel === 'stable' ? 'apm' : 'apm-' + CONFIG.channel;
    const appName =
      CONFIG.channel === 'stable'
        ? 'Pulsar'
        : startCase('Pulsar ' + CONFIG.channel);
    const appDescription = CONFIG.appMetadata.description;
    const prefixDirPath =
      installDir !== '' ? handleTilde(installDir) : path.join('/usr', 'local');
    const shareDirPath = path.join(prefixDirPath, 'share');
    const installationDirPath = path.join(shareDirPath, editorExecutableName);
    const applicationsDirPath = path.join(shareDirPath, 'applications');

    const binDirPath = path.join(prefixDirPath, 'bin');

    fs.mkdirpSync(applicationsDirPath);
    fs.mkdirpSync(binDirPath);

    install(installationDirPath, packagedAppFileName, packagedAppPath);

    {
      // Install icons
      const baseIconThemeDirPath = findBaseIconThemeDirPath();
      const fullIconName = editorExecutableName + '.png';

      // Remove icons from any previous install; log only on first removal.
      let existingIconsFound = false;
      fs.readdirSync(baseIconThemeDirPath).forEach(size => {
        const iconPath = path.join(
          baseIconThemeDirPath,
          size,
          'apps',
          fullIconName
        );
        if (fs.existsSync(iconPath)) {
          if (!existingIconsFound) {
            console.log(
              `Removing existing icons from "${baseIconThemeDirPath}"`
            );
          }
          existingIconsFound = true;
          fs.removeSync(iconPath);
        }
      });

      console.log(`Installing icons at "${baseIconThemeDirPath}"`);
      const appIconsPath = path.join(
        CONFIG.repositoryRootPath,
        'resources',
        'app-icons',
        CONFIG.channel,
        'png'
      );
      // Icon files are named "<size>.png"; install each at the theme's
      // "<size>x<size>/apps/" location.
      fs.readdirSync(appIconsPath).forEach(imageName => {
        if (/\.png$/.test(imageName)) {
          const size = path.basename(imageName, '.png');
          const iconPath = path.join(appIconsPath, imageName);
          fs.copySync(
            iconPath,
            path.join(
              baseIconThemeDirPath,
              `${size}x${size}`,
              'apps',
              fullIconName
            )
          );
        }
      });

      console.log(`Updating icon cache for "${baseIconThemeDirPath}"`);
      // Best-effort: ignore failures (e.g. gtk-update-icon-cache missing).
      try {
        execSync(`gtk-update-icon-cache ${baseIconThemeDirPath} --force`);
      } catch (e) {}
    }

    {
      // Install xdg desktop file
      const desktopEntryPath = path.join(
        applicationsDirPath,
        `${editorExecutableName}.desktop`
      );
      if (fs.existsSync(desktopEntryPath)) {
        console.log(
          `Removing existing desktop entry file at "${desktopEntryPath}"`
        );
        fs.removeSync(desktopEntryPath);
      }
      console.log(`Writing desktop entry file at "${desktopEntryPath}"`);
      const desktopEntryTemplate = fs.readFileSync(
        path.join(
          CONFIG.repositoryRootPath,
          'resources',
          'linux',
          'pulsar.desktop.in'
        )
      );
      // Render the lodash template with the channel-specific values.
      const desktopEntryContents = template(desktopEntryTemplate)({
        appName,
        appFileName: editorExecutableName,
        description: appDescription,
        installDir: prefixDirPath,
        iconPath: editorExecutableName
      });
      fs.writeFileSync(desktopEntryPath, desktopEntryContents);
    }

    {
      // Add pulsar executable to the PATH
      const editorBinDestinationPath = path.join(binDirPath, editorExecutableName);
      if (fs.existsSync(editorBinDestinationPath)) {
        console.log(
          `Removing existing executable at "${editorBinDestinationPath}"`
        );
        fs.removeSync(editorBinDestinationPath);
      }
      console.log(`Copying pulsar.sh to "${editorBinDestinationPath}"`);
      fs.copySync(
        path.join(CONFIG.repositoryRootPath, 'pulsar.sh'),
        editorBinDestinationPath
      );
    }

    {
      // Link apm executable to the PATH
      const pkgMgrBinDestinationPath = path.join(binDirPath, pkgMgrExecutableName);
      // lstat (not stat) so a dangling symlink from a previous install is
      // still detected and removed; a missing path throws and is ignored.
      try {
        fs.lstatSync(pkgMgrBinDestinationPath);
        console.log(
          `Removing existing executable at "${pkgMgrBinDestinationPath}"`
        );
        fs.removeSync(pkgMgrBinDestinationPath);
      } catch (e) {}
      console.log(`Symlinking apm to "${pkgMgrBinDestinationPath}"`);
      // Relative symlink into the installed share/ tree so the prefix can be
      // relocated as a whole.
      fs.symlinkSync(
        path.join(
          '..',
          'share',
          editorExecutableName,
          'resources',
          'app',
          'apm',
          'node_modules',
          '.bin',
          'apm'
        ),
        pkgMgrBinDestinationPath
      );
    }

    console.log(`Changing permissions to 755 for "${installationDirPath}"`);
    fs.chmodSync(installationDirPath, '755');
  }

  return Promise.resolve();
};
|
@ -1,21 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const childProcess = require('child_process');
|
||||
|
||||
const CONFIG = require('../config');
|
||||
|
||||
// Recognised by '@electron/get', used by the 'electron-mksnapshot' and 'electron-chromedriver' dependencies
|
||||
process.env.ELECTRON_CUSTOM_VERSION = CONFIG.appMetadata.electronVersion;
|
||||
|
||||
function installScriptDependencies(ci) {
|
||||
console.log('Installing script dependencies');
|
||||
childProcess.execFileSync(
|
||||
CONFIG.getNpmBinPath(ci),
|
||||
['--loglevel=error', ci ? 'ci' : 'install'],
|
||||
{ env: process.env, cwd: CONFIG.scriptRootPath }
|
||||
);
|
||||
}
|
||||
|
||||
const { expose } = require(`${CONFIG.scriptRunnerModulesPath}/threads/worker`);
|
||||
expose(installScriptDependencies);
|
||||
module.exports = installScriptDependencies;
|
@ -1,16 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const childProcess = require('child_process');
|
||||
|
||||
const CONFIG = require('../config');
|
||||
|
||||
function installScriptRunnerDependencies(ci) {
|
||||
console.log('Installing script runner dependencies');
|
||||
childProcess.execFileSync(
|
||||
CONFIG.getNpmBinPath(ci),
|
||||
['--loglevel=error', ci ? 'ci' : 'install'],
|
||||
{ env: process.env, cwd: CONFIG.scriptRunnerRootPath }
|
||||
);
|
||||
}
|
||||
|
||||
module.exports = installScriptRunnerDependencies;
|
@ -1,12 +0,0 @@
|
||||
const childProcess = require('child_process');
|
||||
|
||||
const CONFIG = require('../config.js');
|
||||
|
||||
module.exports = function() {
|
||||
if (process.platform === 'win32') {
|
||||
// Use START as a way to ignore error if Pulsar.exe isnt running
|
||||
childProcess.execSync(`START taskkill /F /IM ${CONFIG.executableName}`);
|
||||
} else {
|
||||
childProcess.execSync(`pkill -9 ${CONFIG.appMetadata.productName} || true`);
|
||||
}
|
||||
};
|
@ -1,41 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const coffeelint = require('coffeelint');
|
||||
const expandGlobPaths = require('./expand-glob-paths');
|
||||
const path = require('path');
|
||||
const readFiles = require('./read-files');
|
||||
|
||||
const CONFIG = require('../config');
|
||||
|
||||
module.exports = function() {
|
||||
const globPathsToLint = [
|
||||
path.join(CONFIG.repositoryRootPath, 'dot-atom/**/*.coffee'),
|
||||
path.join(CONFIG.repositoryRootPath, 'src/**/*.coffee'),
|
||||
path.join(CONFIG.repositoryRootPath, 'spec/*.coffee')
|
||||
];
|
||||
return expandGlobPaths(globPathsToLint)
|
||||
.then(readFiles)
|
||||
.then(files => {
|
||||
const errors = [];
|
||||
const lintConfiguration = require(path.join(
|
||||
CONFIG.repositoryRootPath,
|
||||
'coffeelint.json'
|
||||
));
|
||||
for (let file of files) {
|
||||
const lintErrors = coffeelint.lint(
|
||||
file.content,
|
||||
lintConfiguration,
|
||||
false
|
||||
);
|
||||
for (let error of lintErrors) {
|
||||
errors.push({
|
||||
path: file.path,
|
||||
lineNumber: error.lineNumber,
|
||||
message: error.message,
|
||||
rule: error.rule
|
||||
});
|
||||
}
|
||||
}
|
||||
return errors;
|
||||
});
|
||||
};
|
@ -1,60 +0,0 @@
|
||||
'use strict';

const path = require('path');
const { spawn } = require('child_process');
const process = require('process');

const CONFIG = require('../config');

// Runs the repository-local ESLint binary over the whole repository and
// resolves with a flat list of { path, message, lineNumber, rule } records.
// A `--fix` flag on this script's own command line is forwarded to ESLint.
module.exports = async function() {
  return new Promise((resolve, reject) => {
    // `--format json` makes ESLint's stdout machine-parseable.
    const eslintArgs = ['--cache', '--format', 'json'];

    if (process.argv.includes('--fix')) {
      eslintArgs.push('--fix');
    }

    // npm bin shims are `.cmd` files on Windows, bare scripts elsewhere.
    const eslintBinary = process.platform === 'win32' ? 'eslint.cmd' : 'eslint';
    const eslint = spawn(
      path.join('script', 'node_modules', '.bin', eslintBinary),
      [...eslintArgs, '.'],
      { cwd: CONFIG.repositoryRootPath }
    );

    // Buffer both streams: stdout carries the JSON report, stderr carries
    // any ESLint startup/crash diagnostics.
    let output = '';
    let errorOutput = '';
    eslint.stdout.on('data', data => {
      output += data.toString();
    });

    eslint.stderr.on('data', data => {
      errorOutput += data.toString();
    });

    eslint.on('error', error => reject(error));
    eslint.on('close', exitCode => {
      const errors = [];
      let files;

      try {
        files = JSON.parse(output);
      } catch (_) {
        // Unparseable stdout means ESLint itself failed; surface its stderr.
        reject(errorOutput);
        return;
      }

      // Flatten the per-file message lists into one record list.
      for (const file of files) {
        for (const error of file.messages) {
          errors.push({
            path: file.filePath,
            message: error.message,
            lineNumber: error.line,
            rule: error.ruleId
          });
        }
      }

      resolve(errors);
    });
  });
};
|
@ -1,63 +0,0 @@
|
||||
'use strict';

const stylelint = require('stylelint');
const path = require('path');

const CONFIG = require('../config');

// Lints the repository's Less stylesheets with stylelint and resolves with
// a flat list of { path, lineNumber, message, rule } records for
// error-severity warnings. Lower-severity findings, deprecations, and
// invalid-option notices are only logged to the console.
module.exports = function() {
  return stylelint
    .lint({
      files: path.join(CONFIG.repositoryRootPath, 'static/**/*.less'),
      configBasedir: __dirname,
      configFile: path.resolve(__dirname, '..', '..', 'stylelint.config.js')
    })
    .then(({ results }) => {
      const errors = [];

      for (const result of results) {
        // Surface stylelint's own deprecation notices so config rot stays
        // visible in build logs.
        for (const deprecation of result.deprecations) {
          console.log('stylelint encountered deprecation:', deprecation.text);
          if (deprecation.reference != null) {
            console.log('more information at', deprecation.reference);
          }
        }

        for (const invalidOptionWarning of result.invalidOptionWarnings) {
          console.warn(
            'stylelint encountered invalid option:',
            invalidOptionWarning.text
          );
        }

        // Only files that actually errored contribute records; non-error
        // warnings within them are logged but not collected.
        if (result.errored) {
          for (const warning of result.warnings) {
            if (warning.severity === 'error') {
              errors.push({
                path: result.source,
                lineNumber: warning.line,
                message: warning.text,
                rule: warning.rule
              });
            } else {
              console.warn(
                'stylelint encountered non-critical warning in file',
                result.source,
                'at line',
                warning.line,
                'for rule',
                warning.rule + ':',
                warning.text
              );
            }
          }
        }
      }

      return errors;
    })
    .catch(err => {
      console.error('There was a problem linting LESS:');
      throw err;
    });
};
|
@ -1,18 +0,0 @@
|
||||
const notarize = require('electron-notarize').notarize;
|
||||
|
||||
module.exports = async function(packagedAppPath) {
|
||||
const appBundleId = 'com.github.pulsar';
|
||||
const appleId = process.env.AC_USER;
|
||||
const appleIdPassword = process.env.AC_PASSWORD;
|
||||
console.log(`Notarizing application at ${packagedAppPath}`);
|
||||
try {
|
||||
await notarize({
|
||||
appBundleId: appBundleId,
|
||||
appPath: packagedAppPath,
|
||||
appleId: appleId,
|
||||
appleIdPassword: appleIdPassword
|
||||
});
|
||||
} catch (e) {
|
||||
throw new Error(e);
|
||||
}
|
||||
};
|
@ -1,278 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const assert = require('assert');
|
||||
const childProcess = require('child_process');
|
||||
const electronPackager = require('electron-packager');
|
||||
const fs = require('fs-extra');
|
||||
const hostArch = require('@electron/get').getHostArch;
|
||||
const includePathInPackagedApp = require('./include-path-in-packaged-app');
|
||||
const getLicenseText = require('./get-license-text');
|
||||
const path = require('path');
|
||||
const spawnSync = require('./spawn-sync');
|
||||
const template = require('lodash.template');
|
||||
|
||||
const CONFIG = require('../config');
|
||||
const HOST_ARCH = hostArch();
|
||||
|
||||
// Packages the staged application tree (CONFIG.intermediateAppPath) into a
// platform bundle with electron-packager, then performs platform fix-ups
// and copies the resources that must live outside the ASAR archive.
// Resolves with the path of the final renamed bundle.
module.exports = function() {
  const appName = getAppName();
  console.log(
    `Running electron-packager on ${
      CONFIG.intermediateAppPath
    } with app name "${appName}"`
  );
  return runPackager({
    appBundleId: 'com.github.pulsar',
    //TODO_PULSAR: Check to see if we should/need to migrate away from GitHub as a CompanyName
    appCopyright: `Copyright © 2014-${new Date().getFullYear()} GitHub, Inc. All rights reserved.`,
    appVersion: CONFIG.appMetadata.version,
    arch: process.platform === 'darwin' ? 'x64' : HOST_ARCH, // OS X is 64-bit only
    // Files matching this glob stay unpacked next to the ASAR archive.
    asar: { unpack: buildAsarUnpackGlobExpression() },
    buildVersion: CONFIG.appMetadata.version,
    derefSymlinks: false,
    download: { cache: CONFIG.electronDownloadPath },
    dir: CONFIG.intermediateAppPath,
    electronVersion: CONFIG.appMetadata.electronVersion,
    extendInfo: path.join(
      CONFIG.repositoryRootPath,
      'resources',
      'mac',
      'pulsar-Info.plist'
    ),
    helperBundleId: 'com.github.pulsar.helper',
    // electron-packager appends the platform-appropriate extension itself.
    icon: path.join(
      CONFIG.repositoryRootPath,
      'resources',
      'app-icons',
      CONFIG.channel,
      'pulsar'
    ),
    name: appName,
    out: CONFIG.buildOutputPath,
    overwrite: true,
    platform: process.platform,
    // Pulsar doesn't have devDependencies, but if prune is true, it will delete the non-standard packageDependencies.
    prune: false,
    win32metadata: {
      //TODO_PULSAR: Check to see if we should/need to migrate away from GitHub as a CompanyName
      CompanyName: 'GitHub, Inc.',
      FileDescription: 'Pulsar',
      ProductName: CONFIG.appName
    }
  }).then(packagedAppPath => {
    // Resource layout differs per platform: macOS keeps resources inside
    // the .app bundle; Linux and Windows use a sibling "resources" dir.
    let bundledResourcesPath;
    if (process.platform === 'darwin') {
      bundledResourcesPath = path.join(
        packagedAppPath,
        'Contents',
        'Resources'
      );
      setAtomHelperVersion(packagedAppPath);
    } else if (process.platform === 'linux') {
      bundledResourcesPath = path.join(packagedAppPath, 'resources');
      chmodNodeFiles(packagedAppPath);
    } else {
      bundledResourcesPath = path.join(packagedAppPath, 'resources');
    }

    return copyNonASARResources(packagedAppPath, bundledResourcesPath).then(
      () => {
        console.log(`Application bundle created at ${packagedAppPath}`);
        return packagedAppPath;
      }
    );
  });
};
|
||||
|
||||
// Copies files that must live outside the ASAR archive (apm, launcher
// scripts, icons) into the packaged app's resources directory, then writes
// the aggregated LICENSE.md. Returns a promise that resolves once the
// license file has been written.
function copyNonASARResources(packagedAppPath, bundledResourcesPath) {
  console.log(`Copying non-ASAR resources to ${bundledResourcesPath}`);
  // Bundle apm, filtering out files excluded by includePathInPackagedApp.
  fs.copySync(
    path.join(
      CONFIG.repositoryRootPath,
      'apm',
      'node_modules',
      'atom-package-manager'
    ),
    path.join(bundledResourcesPath, 'app', 'apm'),
    { filter: includePathInPackagedApp }
  );
  if (process.platform !== 'win32') {
    // Existing symlinks on user systems point to an outdated path, so just symlink it to the real location of the apm binary.
    // TODO: Change command installer to point to appropriate path and remove this fallback after a few releases.
    fs.symlinkSync(
      path.join('..', '..', 'bin', 'apm'),
      path.join(
        bundledResourcesPath,
        'app',
        'apm',
        'node_modules',
        '.bin',
        'apm'
      )
    );
    fs.copySync(
      path.join(CONFIG.repositoryRootPath, 'pulsar.sh'),
      path.join(bundledResourcesPath, 'app', 'pulsar.sh')
    );
  }
  if (process.platform === 'darwin') {
    // Document icon shown for files associated with Pulsar.
    fs.copySync(
      path.join(CONFIG.repositoryRootPath, 'resources', 'mac', 'file.icns'),
      path.join(bundledResourcesPath, 'file.icns')
    );
  } else if (process.platform === 'linux') {
    fs.copySync(
      path.join(
        CONFIG.repositoryRootPath,
        'resources',
        'app-icons',
        CONFIG.channel,
        'png',
        '1024.png'
      ),
      path.join(packagedAppPath, 'pulsar.png')
    );
  } else if (process.platform === 'win32') {
    // Windows CLI shims and file-association icons.
    [
      'pulsar.sh',
      'pulsar.js',
      'apm.cmd',
      'apm.sh',
      'file.ico',
      'folder.ico'
    ].forEach(file =>
      fs.copySync(
        path.join(CONFIG.repositoryRootPath, 'resources', 'win', file),
        path.join(bundledResourcesPath, 'cli', file)
      )
    );

    // Customize pulsar.cmd for the channel-specific pulsar.exe name (e.g. pulsar-beta.exe)
    generateAtomCmdForChannel(bundledResourcesPath);
  }

  console.log(`Writing LICENSE.md to ${bundledResourcesPath}`);
  return getLicenseText().then(licenseText => {
    fs.writeFileSync(
      path.join(bundledResourcesPath, 'LICENSE.md'),
      licenseText
    );
  });
}
|
||||
|
||||
// Stamps the "Pulsar Helper.app" Info.plist inside the packaged macOS
// bundle with the current app version, using PlistBuddy (macOS only).
function setAtomHelperVersion(packagedAppPath) {
  const helperPListPath = path.join(
    packagedAppPath,
    'Contents',
    'Frameworks',
    'Pulsar Helper.app',
    'Contents',
    'Info.plist'
  );
  console.log(`Setting Pulsar Helper Version for ${helperPListPath}`);
  const version = CONFIG.appMetadata.version;
  // Both the build version and the user-visible short version get stamped.
  for (const plistKey of ['CFBundleVersion', 'CFBundleShortVersionString']) {
    spawnSync('/usr/libexec/PlistBuddy', [
      '-c',
      `Add ${plistKey} string ${version}`,
      helperPListPath
    ]);
  }
}
|
||||
|
||||
// Strips the executable bit from every native `.node` module inside the
// packaged app (Linux): they are loaded as libraries, not executed.
function chmodNodeFiles(packagedAppPath) {
  console.log(`Changing permissions for node files in ${packagedAppPath}`);
  // BUG FIX: quote the `-name` pattern. Unquoted, the shell glob-expands
  // `*.node` against the current working directory before `find` ever sees
  // it, silently changing (or breaking) the command whenever the cwd
  // happens to contain matching files.
  childProcess.execSync(
    `find "${packagedAppPath}" -type f -name '*.node' -exec chmod a-x {} \\;`
  );
}
|
||||
|
||||
// Builds the brace-expansion glob handed to electron-packager's
// `asar.unpack` option: files matching any of these patterns are kept
// outside the ASAR archive (native binaries and tools the OS must be able
// to load or execute directly from disk).
function buildAsarUnpackGlobExpression() {
  const unpackPatterns = [
    '*.node',
    'ctags-config',
    'ctags-darwin',
    'ctags-linux',
    'ctags-win32.exe'
  ];
  unpackPatterns.push(
    path.join('**', 'node_modules', 'spellchecker', '**'),
    path.join('**', 'node_modules', 'dugite', 'git', '**'),
    path.join('**', 'node_modules', 'github', 'bin', '**'),
    path.join('**', 'node_modules', 'vscode-ripgrep', 'bin', '**'),
    path.join('**', 'resources', 'pulsar.png')
  );

  return `{${unpackPatterns.join(',')}}`;
}
|
||||
|
||||
// Platform-specific application name: macOS uses the display name from
// CONFIG, Windows suffixes non-stable channels (e.g. "pulsar-beta"), and
// every other platform gets plain "pulsar".
function getAppName() {
  switch (process.platform) {
    case 'darwin':
      return CONFIG.appName;
    case 'win32':
      if (CONFIG.channel === 'stable') {
        return 'pulsar';
      }
      return `pulsar-${CONFIG.channel}`;
    default:
      return 'pulsar';
  }
}
|
||||
|
||||
// Runs electron-packager with `options` and moves its single output directory
// to our conventional location; resolves with the final app path.
async function runPackager(options) {
  const outputDirPaths = await electronPackager(options);

  // We package exactly one application per invocation; more is a build bug.
  assert(
    outputDirPaths.length === 1,
    'Generated more than one electron application!'
  );

  const [packageOutputDirPath] = outputDirPaths;
  return renamePackagedAppDir(packageOutputDirPath);
}
|
||||
|
||||
// Moves electron-packager's output to its final location under
// CONFIG.buildOutputPath, deleting any previous build at the destination,
// and returns the new path. The destination naming is platform-specific.
function renamePackagedAppDir(packageOutputDirPath) {
  let packagedAppPath;
  if (process.platform === 'darwin') {
    // macOS: move only the .app bundle out of the packager output directory.
    const appBundleName = getAppName() + '.app';
    packagedAppPath = path.join(CONFIG.buildOutputPath, appBundleName);
    if (fs.existsSync(packagedAppPath)) fs.removeSync(packagedAppPath);
    fs.renameSync(
      path.join(packageOutputDirPath, appBundleName),
      packagedAppPath
    );
  } else if (process.platform === 'linux') {
    const appName =
      CONFIG.channel !== 'stable' ? `pulsar-${CONFIG.channel}` : 'pulsar';
    // Map Node's arch names onto the Debian-style names used in the dir name.
    let architecture;
    if (HOST_ARCH === 'ia32') {
      architecture = 'i386';
    } else if (HOST_ARCH === 'x64') {
      architecture = 'amd64';
    } else {
      architecture = HOST_ARCH;
    }
    // e.g. "pulsar-1.63.0-amd64"
    packagedAppPath = path.join(
      CONFIG.buildOutputPath,
      `${appName}-${CONFIG.appMetadata.version}-${architecture}`
    );
    if (fs.existsSync(packagedAppPath)) fs.removeSync(packagedAppPath);
    fs.renameSync(packageOutputDirPath, packagedAppPath);
  } else {
    // Windows (and any other platform): rename the whole output directory,
    // appending the arch for non-ia32 Windows builds.
    packagedAppPath = path.join(CONFIG.buildOutputPath, CONFIG.appName);
    if (process.platform === 'win32' && HOST_ARCH !== 'ia32') {
      packagedAppPath += ` ${process.arch}`;
    }
    if (fs.existsSync(packagedAppPath)) fs.removeSync(packagedAppPath);
    fs.renameSync(packageOutputDirPath, packagedAppPath);
  }
  return packagedAppPath;
}
|
||||
|
||||
// Renders the pulsar.cmd template with the channel-specific executable name
// (e.g. pulsar-beta.exe) and writes it into the bundled resources' `cli` dir.
function generateAtomCmdForChannel(bundledResourcesPath) {
  // readFileSync without an encoding yields a Buffer; `template` stringifies
  // it — presumably lodash.template, TODO confirm against the file's imports.
  const atomCmdTemplate = fs.readFileSync(
    path.join(CONFIG.repositoryRootPath, 'resources', 'win', 'pulsar.cmd')
  );
  const atomCmdContents = template(atomCmdTemplate)({
    atomExeName: CONFIG.executableName
  });
  fs.writeFileSync(
    path.join(bundledResourcesPath, 'cli', 'pulsar.cmd'),
    atomCmdContents
  );
}
|
@ -1,218 +0,0 @@
|
||||
'use strict';

const fs = require('fs');
const klawSync = require('klaw-sync');
const glob = require('glob');
const path = require('path');
const LessCache = require('less-cache');

const CONFIG = require('../config');
const LESS_CACHE_VERSION = require('less-cache/package.json').version;
// Prepended to package style sheets so they can resolve theme variables.
const FALLBACK_VARIABLE_IMPORTS =
  '@import "variables/ui-variables";\n@import "variables/syntax-variables";\n';

// Pre-compiles every bundled .less file into a less-cache directory inside
// the intermediate app, and records the raw Less sources plus the files under
// each import path into CONFIG.snapshotAuxiliaryData so the runtime can skip
// disk scans and recompilation.
module.exports = function() {
  const cacheDirPath = path.join(
    CONFIG.intermediateAppPath,
    'less-compile-cache'
  );
  console.log(`Generating pre-built less cache in ${cacheDirPath}`);

  // Group bundled packages into UI themes, syntax themes, and non-theme packages
  const uiThemes = [];
  const syntaxThemes = [];
  const nonThemePackages = [];
  for (let packageName in CONFIG.appMetadata.packageDependencies) {
    const packageMetadata = require(path.join(
      CONFIG.intermediateAppPath,
      'node_modules',
      packageName,
      'package.json'
    ));
    if (packageMetadata.theme === 'ui') {
      uiThemes.push(packageName);
    } else if (packageMetadata.theme === 'syntax') {
      syntaxThemes.push(packageName);
    } else {
      nonThemePackages.push(packageName);
    }
  }

  CONFIG.snapshotAuxiliaryData.lessSourcesByRelativeFilePath = {};
  // Records one file's Less source (keyed by path relative to the app root)
  // together with the digest less-cache uses for invalidation. First write
  // wins; later theme combinations don't overwrite earlier entries.
  function saveIntoSnapshotAuxiliaryData(absoluteFilePath, content) {
    const relativeFilePath = path.relative(
      CONFIG.intermediateAppPath,
      absoluteFilePath
    );
    if (
      !CONFIG.snapshotAuxiliaryData.lessSourcesByRelativeFilePath.hasOwnProperty(
        relativeFilePath
      )
    ) {
      CONFIG.snapshotAuxiliaryData.lessSourcesByRelativeFilePath[
        relativeFilePath
      ] = {
        content: content,
        digest: LessCache.digestForContent(content)
      };
    }
  }

  CONFIG.snapshotAuxiliaryData.importedFilePathsByRelativeImportPath = {};
  // Warm cache for every combination of the default UI and syntax themes,
  // because themes assign variables which may be used in any style sheet.
  for (let uiTheme of uiThemes) {
    for (let syntaxTheme of syntaxThemes) {
      // Build a LessCache instance with import paths based on the current theme combination
      const lessCache = new LessCache({
        cacheDir: cacheDirPath,
        fallbackDir: path.join(
          CONFIG.atomHomeDirPath,
          'compile-cache',
          'prebuild-less',
          LESS_CACHE_VERSION
        ),
        syncCaches: true,
        resourcePath: CONFIG.intermediateAppPath,
        importPaths: [
          path.join(
            CONFIG.intermediateAppPath,
            'node_modules',
            syntaxTheme,
            'styles'
          ),
          path.join(
            CONFIG.intermediateAppPath,
            'node_modules',
            uiTheme,
            'styles'
          ),
          path.join(CONFIG.intermediateAppPath, 'static', 'variables'),
          path.join(CONFIG.intermediateAppPath, 'static')
        ]
      });

      // Store file paths located at the import paths so that we can avoid scanning them at runtime.
      for (const absoluteImportPath of lessCache.getImportPaths()) {
        const relativeImportPath = path.relative(
          CONFIG.intermediateAppPath,
          absoluteImportPath
        );
        if (
          !CONFIG.snapshotAuxiliaryData.importedFilePathsByRelativeImportPath.hasOwnProperty(
            relativeImportPath
          )
        ) {
          CONFIG.snapshotAuxiliaryData.importedFilePathsByRelativeImportPath[
            relativeImportPath
          ] = [];
          // klawSync walks the directory recursively; nodir keeps files only.
          for (const importedFile of klawSync(absoluteImportPath, {
            nodir: true
          })) {
            CONFIG.snapshotAuxiliaryData.importedFilePathsByRelativeImportPath[
              relativeImportPath
            ].push(
              path.relative(CONFIG.intermediateAppPath, importedFile.path)
            );
          }
        }
      }

      // Cache all styles in static; don't append variable imports
      for (let lessFilePath of glob.sync(
        path.join(CONFIG.intermediateAppPath, 'static', '**', '*.less')
      )) {
        cacheCompiledCSS(lessCache, lessFilePath, false);
      }

      // Cache styles for all bundled non-theme packages
      for (let nonThemePackage of nonThemePackages) {
        for (let lessFilePath of glob.sync(
          path.join(
            CONFIG.intermediateAppPath,
            'node_modules',
            nonThemePackage,
            '**',
            '*.less'
          )
        )) {
          cacheCompiledCSS(lessCache, lessFilePath, true);
        }
      }

      // Cache styles for this UI theme
      const uiThemeMainPath = path.join(
        CONFIG.intermediateAppPath,
        'node_modules',
        uiTheme,
        'index.less'
      );
      cacheCompiledCSS(lessCache, uiThemeMainPath, true);
      // Non-entry-point theme files are recorded (for the snapshot) but not
      // compiled individually; only index.less is compiled as an entry point.
      for (let lessFilePath of glob.sync(
        path.join(
          CONFIG.intermediateAppPath,
          'node_modules',
          uiTheme,
          '**',
          '*.less'
        )
      )) {
        if (lessFilePath !== uiThemeMainPath) {
          saveIntoSnapshotAuxiliaryData(
            lessFilePath,
            fs.readFileSync(lessFilePath, 'utf8')
          );
        }
      }

      // Cache styles for this syntax theme
      const syntaxThemeMainPath = path.join(
        CONFIG.intermediateAppPath,
        'node_modules',
        syntaxTheme,
        'index.less'
      );
      cacheCompiledCSS(lessCache, syntaxThemeMainPath, true);
      for (let lessFilePath of glob.sync(
        path.join(
          CONFIG.intermediateAppPath,
          'node_modules',
          syntaxTheme,
          '**',
          '*.less'
        )
      )) {
        if (lessFilePath !== syntaxThemeMainPath) {
          saveIntoSnapshotAuxiliaryData(
            lessFilePath,
            fs.readFileSync(lessFilePath, 'utf8')
          );
        }
      }
    }
  }

  // Record atom-ui sources too; they are imported but not compiled here.
  for (let lessFilePath of glob.sync(
    path.join(
      CONFIG.intermediateAppPath,
      'node_modules',
      'atom-ui',
      '**',
      '*.less'
    )
  )) {
    saveIntoSnapshotAuxiliaryData(
      lessFilePath,
      fs.readFileSync(lessFilePath, 'utf8')
    );
  }

  // Compiles one entry-point .less file into the cache, optionally prefixing
  // the fallback variable imports, and records the (possibly prefixed) source.
  function cacheCompiledCSS(lessCache, lessFilePath, importFallbackVariables) {
    let lessSource = fs.readFileSync(lessFilePath, 'utf8');
    if (importFallbackVariables) {
      lessSource = FALLBACK_VARIABLE_IMPORTS + lessSource;
    }
    lessCache.cssForFile(lessFilePath, lessSource);
    saveIntoSnapshotAuxiliaryData(lessFilePath, lessSource);
  }
};
|
@ -1,19 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
|
||||
module.exports = function(paths) {
|
||||
return Promise.all(paths.map(readFile));
|
||||
};
|
||||
|
||||
function readFile(path) {
|
||||
return new Promise((resolve, reject) => {
|
||||
fs.readFile(path, 'utf8', (error, content) => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
} else {
|
||||
resolve({ path, content });
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
@ -1,17 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const childProcess = require('child_process');
|
||||
|
||||
const CONFIG = require('../config');
|
||||
|
||||
module.exports = function(packagePath, ci, stdioOptions) {
|
||||
const installEnv = Object.assign({}, process.env);
|
||||
// Set resource path so that apm can load metadata related to Pulsar.
|
||||
installEnv.ATOM_RESOURCE_PATH = CONFIG.repositoryRootPath;
|
||||
|
||||
childProcess.execFileSync(CONFIG.getApmBinPath(), [ci ? 'ci' : 'install'], {
|
||||
env: installEnv,
|
||||
cwd: packagePath,
|
||||
stdio: stdioOptions || 'inherit'
|
||||
});
|
||||
};
|
@ -1,22 +0,0 @@
|
||||
// This file exports a function that has the same interface as
|
||||
// `spawnSync`, but it throws if there's an error while executing
|
||||
// the supplied command or if the exit code is not 0. This is similar to what
|
||||
// `execSync` does, but we want to use `spawnSync` because it provides automatic
|
||||
// escaping for the supplied arguments.
|
||||
|
||||
const childProcess = require('child_process');
|
||||
|
||||
module.exports = function() {
|
||||
const result = childProcess.spawnSync.apply(childProcess, arguments);
|
||||
if (result.error) {
|
||||
throw result.error;
|
||||
} else if (result.status !== 0) {
|
||||
if (result.stdout) console.error(result.stdout.toString());
|
||||
if (result.stderr) console.error(result.stderr.toString());
|
||||
throw new Error(
|
||||
`Command ${result.args.join(' ')} exited with code "${result.status}"`
|
||||
);
|
||||
} else {
|
||||
return result;
|
||||
}
|
||||
};
|
@ -1,36 +0,0 @@
|
||||
const spawnSync = require('./spawn-sync');

// Development-only code signing for macOS: if a "Mac Developer" certificate
// exists in the keychain, deep-signs the packaged app with it; otherwise
// prints an error and continues (signing is best-effort here).
module.exports = function(packagedAppPath) {
  // Look the certificate up in the user's keychain.
  const result = spawnSync('security', [
    'find-certificate',
    '-c',
    'Mac Developer'
  ]);

  // Extract the full common name, e.g. "Mac Developer: Jane Doe (TEAMID)".
  const certMatch = (result.stdout || '')
    .toString()
    .match(/"(Mac Developer.*\))"/);
  if (!certMatch || !certMatch[1]) {
    console.error(
      'A "Mac Developer" certificate must be configured to perform test signing'
    );
  } else {
    // This code-signs the application with a local certificate which won't be
    // useful anywhere else but the current machine
    // See this issue for more details: https://github.com/electron/electron/issues/7476#issuecomment-356084754
    console.log(`Found development certificate '${certMatch[1]}'`);
    console.log(`Test-signing application at ${packagedAppPath}`);
    spawnSync(
      'codesign',
      [
        '--deep',
        '--force',
        '--verbose',
        '--sign',
        certMatch[1],
        packagedAppPath
      ],
      { stdio: 'inherit' }
    );
  }
};
|
@ -1,55 +0,0 @@
|
||||
'use strict';

const CompileCache = require('../../src/compile-cache');
const fs = require('fs');
const glob = require('glob');
const path = require('path');

const CONFIG = require('../config');

// Transpiles, in place, every Babel-handled .js file belonging to bundled
// packages inside the intermediate app directory.
function transpileBabelPaths() {
  console.log(`Transpiling Babel paths in ${CONFIG.intermediateAppPath}`);
  for (let path of getPathsToTranspile()) {
    transpileBabelPath(path);
  }
}

// Collects all bundled-package .js files, excluding each package's spec/ tree.
function getPathsToTranspile() {
  let paths = [];
  for (let packageName of Object.keys(CONFIG.appMetadata.packageDependencies)) {
    paths = paths.concat(
      glob.sync(
        path.join(
          CONFIG.intermediateAppPath,
          'node_modules',
          packageName,
          '**',
          '*.js'
        ),
        {
          ignore: path.join(
            CONFIG.intermediateAppPath,
            'node_modules',
            packageName,
            'spec',
            '**',
            '*.js'
          ),
          nodir: true
        }
      )
    );
  }
  return paths;
}

// Overwrites the file with its compiled output, also priming the compile
// cache under CONFIG.atomHomeDirPath as a side effect.
function transpileBabelPath(path) {
  fs.writeFileSync(
    path,
    CompileCache.addPathToCache(path, CONFIG.atomHomeDirPath)
  );
}

// Expose the entry point to the build's worker-thread pool (threads package
// resolved from the script runner's own node_modules).
const { expose } = require(`${CONFIG.scriptRunnerModulesPath}/threads/worker`);
expose(transpileBabelPaths);
module.exports = transpileBabelPaths;
|
@ -1,69 +0,0 @@
|
||||
'use strict';

const CompileCache = require('../../src/compile-cache');
const fs = require('fs');
const glob = require('glob');
const path = require('path');

const CONFIG = require('../config');

// Converts every bundled .coffee file (core src/, top-level spec/, and all
// bundled packages) to a sibling .js file, removing the CoffeeScript source.
function transpileCoffeeScriptPaths() {
  console.log(
    `Transpiling CoffeeScript paths in ${CONFIG.intermediateAppPath}`
  );
  for (let path of getPathsToTranspile()) {
    transpileCoffeeScriptPath(path);
  }
}

// Gathers .coffee files from src/ (recursive), spec/ (top level only), and
// each bundled package (excluding the package's own spec/ tree).
function getPathsToTranspile() {
  let paths = [];
  paths = paths.concat(
    glob.sync(path.join(CONFIG.intermediateAppPath, 'src', '**', '*.coffee'), {
      nodir: true
    })
  );
  paths = paths.concat(
    glob.sync(path.join(CONFIG.intermediateAppPath, 'spec', '*.coffee'), {
      nodir: true
    })
  );
  for (let packageName of Object.keys(CONFIG.appMetadata.packageDependencies)) {
    paths = paths.concat(
      glob.sync(
        path.join(
          CONFIG.intermediateAppPath,
          'node_modules',
          packageName,
          '**',
          '*.coffee'
        ),
        {
          ignore: path.join(
            CONFIG.intermediateAppPath,
            'node_modules',
            packageName,
            'spec',
            '**',
            '*.coffee'
          ),
          nodir: true
        }
      )
    );
  }
  return paths;
}

// Writes the compiled JS next to the source (foo.coffee -> foo.js), then
// deletes the original .coffee file.
function transpileCoffeeScriptPath(coffeePath) {
  const jsPath = coffeePath.replace(/coffee$/g, 'js');
  fs.writeFileSync(
    jsPath,
    CompileCache.addPathToCache(coffeePath, CONFIG.atomHomeDirPath)
  );
  fs.unlinkSync(coffeePath);
}

// Expose the entry point to the build's worker-thread pool.
const { expose } = require(`${CONFIG.scriptRunnerModulesPath}/threads/worker`);
expose(transpileCoffeeScriptPaths);
module.exports = transpileCoffeeScriptPaths;
|
@ -1,59 +0,0 @@
|
||||
'use strict';

const CompileCache = require('../../src/compile-cache');
const fs = require('fs');
const glob = require('glob');
const path = require('path');

const CONFIG = require('../config');

// Converts every bundled-package .cson file to a sibling .json file inside
// the intermediate app directory, removing the CSON source.
function transpileCsonPaths() {
  console.log(`Transpiling CSON paths in ${CONFIG.intermediateAppPath}`);
  for (let path of getPathsToTranspile()) {
    transpileCsonPath(path);
  }
}

// Collects all bundled-package .cson files, excluding each package's spec/ tree.
function getPathsToTranspile() {
  let paths = [];
  for (let packageName of Object.keys(CONFIG.appMetadata.packageDependencies)) {
    paths = paths.concat(
      glob.sync(
        path.join(
          CONFIG.intermediateAppPath,
          'node_modules',
          packageName,
          '**',
          '*.cson'
        ),
        {
          ignore: path.join(
            CONFIG.intermediateAppPath,
            'node_modules',
            packageName,
            'spec',
            '**',
            '*.cson'
          ),
          nodir: true
        }
      )
    );
  }
  return paths;
}

// Writes the parsed CSON as JSON (foo.cson -> foo.json), then deletes the
// source. Assumes CompileCache.addPathToCache returns the parsed object for
// .cson inputs — TODO confirm against src/compile-cache.
function transpileCsonPath(csonPath) {
  const jsonPath = csonPath.replace(/cson$/g, 'json');
  fs.writeFileSync(
    jsonPath,
    JSON.stringify(
      CompileCache.addPathToCache(csonPath, CONFIG.atomHomeDirPath)
    )
  );
  fs.unlinkSync(csonPath);
}

// Expose the entry point to the build's worker-thread pool.
const { expose } = require(`${CONFIG.scriptRunnerModulesPath}/threads/worker`);
expose(transpileCsonPaths);
module.exports = transpileCsonPaths;
|
@ -1,92 +0,0 @@
|
||||
'use strict';

const CompileCache = require('../../src/compile-cache');
const fs = require('fs-extra');
const glob = require('glob');
const path = require('path');

const CONFIG = require('../config');
const backupNodeModules = require('./backup-node-modules');
const runApmInstall = require('./run-apm-install');

require('colors');

// For every bundled package that declares `atomTranspilers` in its
// package.json: temporarily installs its devDependencies, transpiles the
// matching files in place inside the intermediate app, strips the
// `atomTranspilers` field (so runtime Pulsar won't re-transpile), and then
// restores the original node_modules state of both copies of the package.
function transpilePackagesWithCustomTranspilerPaths() {
  console.log(
    `Transpiling packages with custom transpiler configurations in ${
      CONFIG.intermediateAppPath
    }`
  );
  for (let packageName of Object.keys(CONFIG.appMetadata.packageDependencies)) {
    const rootPackagePath = path.join(
      CONFIG.repositoryRootPath,
      'node_modules',
      packageName
    );
    const intermediatePackagePath = path.join(
      CONFIG.intermediateAppPath,
      'node_modules',
      packageName
    );

    const metadataPath = path.join(intermediatePackagePath, 'package.json');
    const metadata = require(metadataPath);

    if (metadata.atomTranspilers) {
      console.log(' transpiling for package '.cyan + packageName.cyan);
      // Snapshot node_modules of both copies so we can roll back afterwards.
      const rootPackageBackup = backupNodeModules(rootPackagePath);
      const intermediatePackageBackup = backupNodeModules(
        intermediatePackagePath
      );

      // Run `apm install` in the *root* package's path, so we get devDeps w/o apm's weird caching
      // Then copy this folder into the intermediate package's path so we can run the transpilation in-line.
      runApmInstall(rootPackagePath);
      if (fs.existsSync(intermediatePackageBackup.nodeModulesPath)) {
        fs.removeSync(intermediatePackageBackup.nodeModulesPath);
      }
      fs.copySync(
        rootPackageBackup.nodeModulesPath,
        intermediatePackageBackup.nodeModulesPath
      );

      // Register the package's transpiler config so addPathToCache knows how
      // to compile each matched file.
      CompileCache.addTranspilerConfigForPath(
        intermediatePackagePath,
        metadata.name,
        metadata,
        metadata.atomTranspilers
      );
      for (let config of metadata.atomTranspilers) {
        const pathsToCompile = glob.sync(
          path.join(intermediatePackagePath, config.glob),
          { nodir: true }
        );
        pathsToCompile.forEach(transpilePath);
      }

      // Now that we've transpiled everything in-place, we no longer want Pulsar to try to transpile
      // the same files when they're being required.
      delete metadata.atomTranspilers;
      fs.writeFileSync(
        metadataPath,
        JSON.stringify(metadata, null, ' '),
        'utf8'
      );

      CompileCache.removeTranspilerConfigForPath(intermediatePackagePath);
      rootPackageBackup.restore();
      intermediatePackageBackup.restore();
    }
  }
}

// Replaces one file's contents with its transpiled output.
function transpilePath(path) {
  fs.writeFileSync(
    path,
    CompileCache.addPathToCache(path, CONFIG.atomHomeDirPath)
  );
}

// Expose the entry point to the build's worker-thread pool.
const { expose } = require(`${CONFIG.scriptRunnerModulesPath}/threads/worker`);
expose(transpilePackagesWithCustomTranspilerPaths);
module.exports = transpilePackagesWithCustomTranspilerPaths;
|
@ -1,47 +0,0 @@
|
||||
'use strict';

const peg = require('pegjs');
const fs = require('fs');
const glob = require('glob');
const path = require('path');

const CONFIG = require('../config');

// Compiles every bundled-package .pegjs grammar to a CommonJS .js parser
// module inside the intermediate app directory, removing the grammar source.
function transpilePegJsPaths() {
  console.log(`Transpiling PEG.js paths in ${CONFIG.intermediateAppPath}`);
  for (let path of getPathsToTranspile()) {
    transpilePegJsPath(path);
  }
}

// Collects all bundled-package .pegjs files.
function getPathsToTranspile() {
  let paths = [];
  for (let packageName of Object.keys(CONFIG.appMetadata.packageDependencies)) {
    paths = paths.concat(
      glob.sync(
        path.join(
          CONFIG.intermediateAppPath,
          'node_modules',
          packageName,
          '**',
          '*.pegjs'
        ),
        { nodir: true }
      )
    );
  }
  return paths;
}

// Generates parser source (foo.pegjs -> foo.js) and deletes the grammar.
// NOTE(review): `buildParser` is the pre-0.9 pegjs API — confirm the pinned
// pegjs version still exposes it (newer releases renamed it to `generate`).
function transpilePegJsPath(pegJsPath) {
  const inputCode = fs.readFileSync(pegJsPath, 'utf8');
  const jsPath = pegJsPath.replace(/pegjs$/g, 'js');
  const outputCode =
    'module.exports = ' + peg.buildParser(inputCode, { output: 'source' });
  fs.writeFileSync(jsPath, outputCode);
  fs.unlinkSync(pegJsPath);
}

// Expose the entry point to the build's worker-thread pool.
const { expose } = require(`${CONFIG.scriptRunnerModulesPath}/threads/worker`);
expose(transpilePegJsPaths);
module.exports = transpilePegJsPaths;
|
@ -1,81 +0,0 @@
|
||||
const fetch = require('node-fetch');
|
||||
const npmCheck = require('npm-check');
|
||||
|
||||
// this may be updated to use github releases instead
|
||||
const apm = async function({ dependencies, packageDependencies }) {
|
||||
try {
|
||||
console.log('Checking apm registry...');
|
||||
const coreDependencies = Object.keys(dependencies).filter(dependency => {
|
||||
// all core packages point to a remote url
|
||||
return dependencies[dependency].match(new RegExp('^https?://'));
|
||||
});
|
||||
|
||||
const promises = coreDependencies.map(async dependency => {
|
||||
return fetch(`https://atom.io/api/packages/${dependency}`)
|
||||
.then(res => res.json())
|
||||
.then(res => res)
|
||||
.catch(ex => console.log(ex.message));
|
||||
});
|
||||
|
||||
const packages = await Promise.all(promises);
|
||||
const outdatedPackages = [];
|
||||
packages.map(dependency => {
|
||||
if (dependency.hasOwnProperty('name')) {
|
||||
const latestVersion = dependency.releases.latest;
|
||||
const installed = packageDependencies[dependency.name];
|
||||
if (latestVersion > installed) {
|
||||
outdatedPackages.push({
|
||||
moduleName: dependency.name,
|
||||
latest: dependency.releases.latest,
|
||||
isCorePackage: true,
|
||||
installed
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
console.log(`${outdatedPackages.length} outdated package(s) found`);
|
||||
|
||||
return outdatedPackages;
|
||||
} catch (ex) {
|
||||
console.error(`An error occured: ${ex.message}`);
|
||||
}
|
||||
};
|
||||
|
||||
const npm = async function(cwd) {
|
||||
try {
|
||||
console.log('Checking npm registry...', cwd);
|
||||
|
||||
const currentState = await npmCheck({
|
||||
cwd,
|
||||
ignoreDev: true,
|
||||
skipUnused: true
|
||||
});
|
||||
|
||||
const outdatedPackages = currentState
|
||||
.get('packages')
|
||||
.filter(p => {
|
||||
if (p.packageJson && p.latest && p.installed) {
|
||||
return p.latest > p.installed;
|
||||
}
|
||||
})
|
||||
.map(({ packageJson, installed, moduleName, latest }) => ({
|
||||
packageJson,
|
||||
installed,
|
||||
moduleName,
|
||||
latest,
|
||||
isCorePackage: false
|
||||
}));
|
||||
|
||||
console.log(`${outdatedPackages.length} outdated package(s) found`);
|
||||
|
||||
return outdatedPackages;
|
||||
} catch (ex) {
|
||||
console.error(`An error occured: ${ex.message}`);
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
apm,
|
||||
npm
|
||||
};
|
@ -1,78 +0,0 @@
|
||||
const { REPO_OWNER, MAIN_REPO } = require('../../config');

// Factory: wraps a simple-git instance (`git`) rooted at `repositoryRootPath`
// with the branch/commit/push helpers used by the dependency-bump scripts.
const git = (git, repositoryRootPath) => {
  const path = require('path');
  const packageJsonFilePath = path.join(repositoryRootPath, 'package.json');
  const packageLockFilePath = path.join(
    repositoryRootPath,
    'package-lock.json'
  );
  try {
    //TODO_PULSAR: Update remotes
    // Register the authenticated "ATOM" remote once, so branches can be pushed.
    git.getRemotes((err, remotes) => {
      if (!err && !remotes.map(({ name }) => name).includes('ATOM')) {
        git.addRemote(
          'ATOM',
          `https://atom:${
            process.env.AUTH_TOKEN
          }@github.com/${REPO_OWNER}/${MAIN_REPO}.git/`
        );
      }
    });
  } catch (ex) {
    console.log(ex.message);
  }

  // Fetches, then checks out the first known branch whose name contains
  // `newBranch`; creates a new local branch when none matches. Resolves with
  // { found, newBranch } where `found` is the matched name or undefined.
  async function createOrCheckoutBranch(newBranch) {
    await git.fetch();
    const { branches } = await git.branch();
    const found = Object.keys(branches).find(
      branch => branch.indexOf(newBranch) > -1
    );
    if (found) {
      await git.checkout(found);
    } else {
      await git.checkoutLocalBranch(newBranch);
    }
    return { found, newBranch };
  }

  return {
    // Moves to the neutral "clean-branch" used as a base between bumps.
    switchToCleanBranch: async function() {
      const cleanBranch = 'clean-branch';
      const { current } = await git.branch();
      if (current !== cleanBranch) {
        // Fix: this promise was previously dropped, so callers awaiting
        // switchToCleanBranch() could proceed before the checkout finished.
        await createOrCheckoutBranch(cleanBranch);
      }
    },
    // Creates (or reuses) the per-dependency branch "<module>-<version>",
    // discarding any dirty working-tree state first.
    makeBranch: async function(dependency) {
      const newBranch = `${dependency.moduleName}-${dependency.latest}`;
      const { files } = await git.status();
      if (files.length > 0) {
        await git.reset('hard');
      }
      return createOrCheckoutBranch(newBranch);
    },
    // Commits the updated package.json / package-lock.json for one bump.
    createCommit: async function({ moduleName, latest }) {
      try {
        const commitMessage = `:arrow_up: ${moduleName}@${latest}`;
        await git.add([packageJsonFilePath, packageLockFilePath]);
        await git.commit(commitMessage);
      } catch (ex) {
        throw Error(ex.message);
      }
    },
    // Pushes `branch` to the authenticated "ATOM" remote.
    publishBranch: async function(branch) {
      try {
        await git.push('ATOM', branch);
      } catch (ex) {
        throw Error(ex.message);
      }
    },
    // Force-deletes the local branch once its PR has been opened.
    deleteBranch: async function(branch) {
      try {
        await git.deleteLocalBranch(branch, true);
      } catch (ex) {
        throw Error(ex.message);
      }
    }
  };
};
module.exports = git;
|
@ -1,3 +0,0 @@
|
||||
// Entry point for the dependency-update script: runs the async workflow
// defined in ./main, which handles its own errors internally.
const run = require('./main');

run();
|
@ -1,122 +0,0 @@
|
||||
/* eslint-disable camelcase */
const simpleGit = require('simple-git');
const path = require('path');

const { repositoryRootPath } = require('../../config');
const packageJSON = require(path.join(repositoryRootPath, 'package.json'));
const git = simpleGit(repositoryRootPath);
const {
  createPR,
  findPR,
  addLabel,
  findOpenPRs,
  checkCIstatus,
  mergePR
} = require('./pull-request');
const runApmInstall = require('../run-apm-install');
const {
  makeBranch,
  createCommit,
  switchToCleanBranch,
  publishBranch,
  deleteBranch
} = require('./git')(git, repositoryRootPath);
const { updatePackageJson, sleep } = require('./util')(repositoryRootPath);
const fetchOutdatedDependencies = require('./fetch-outdated-dependencies');

// Dependency-bump workflow: finds outdated npm and apm (bundled package)
// dependencies, creates one branch + commit per bump, opens a PR for each,
// and finally merges any previously-opened bump PRs whose CI has passed.
module.exports = async function() {
  try {
    // ensure we are on master
    await switchToCleanBranch();
    const failedBumps = [];
    const successfullBumps = [];
    const outdateDependencies = [
      ...(await fetchOutdatedDependencies.npm(repositoryRootPath)),
      ...(await fetchOutdatedDependencies.apm(packageJSON))
    ];
    const totalDependencies = outdateDependencies.length;
    const pendingPRs = [];
    // Phase 1: one branch + commit per outdated dependency.
    for (const dependency of outdateDependencies) {
      const { found, newBranch } = await makeBranch(dependency);
      if (found) {
        // A branch for this bump already exists (locally or on the remote).
        console.log(`Branch was found ${found}`);
        console.log('checking if a PR already exists');
        const {
          data: { total_count }
        } = await findPR(dependency, newBranch);
        if (total_count > 0) {
          console.log(`pull request found!`);
        } else {
          console.log(`pull request not found!`);
          const pr = { dependency, branch: newBranch, branchIsRemote: false };
          // confirm if branch found is a local branch
          if (found.indexOf('remotes') === -1) {
            await publishBranch(found);
          } else {
            pr.branchIsRemote = true;
          }
          pendingPRs.push(pr);
        }
      } else {
        // Fresh bump: update the manifest, reinstall, commit, and push.
        await updatePackageJson(dependency);
        runApmInstall(repositoryRootPath, false);
        await createCommit(dependency);
        await publishBranch(newBranch);
        pendingPRs.push({
          dependency,
          branch: newBranch,
          branchIsRemote: false
        });
      }

      await switchToCleanBranch();
    }
    // create PRs here
    for (const { dependency, branch, branchIsRemote } of pendingPRs) {
      const { status, data = {} } = await createPR(dependency, branch);
      // 201 Created means the PR was opened successfully.
      if (status === 201) {
        successfullBumps.push(dependency);
        await addLabel(data.number);
      } else {
        failedBumps.push(dependency);
      }

      // Local branches are no longer needed once pushed and PR'd.
      if (!branchIsRemote) {
        await deleteBranch(branch);
      }
      // https://developer.github.com/v3/guides/best-practices-for-integrators/#dealing-with-abuse-rate-limits
      await sleep(2000);
    }
    console.table([
      {
        totalDependencies,
        totalSuccessfullBumps: successfullBumps.length,
        totalFailedBumps: failedBumps.length
      }
    ]);
    console.log('Successfull bumps');
    console.table(successfullBumps);
    console.log('Failed bumps');
    console.table(failedBumps);
  } catch (ex) {
    console.log(ex.message);
  }

  // merge previous bumps that passed CI requirements
  try {
    const {
      data: { items }
    } = await findOpenPRs();
    for (const { title } of items) {
      // PR titles look like "⬆️ module@1.2.3"; the branch ref is "module-1.2.3".
      const ref = title.replace('⬆️ ', '').replace('@', '-');
      const {
        data: { state }
      } = await checkCIstatus({ ref });
      if (state === 'success') {
        await mergePR({ ref });
      }
    }
  } catch (ex) {
    console.log(ex);
  }
};
|
@ -1,57 +0,0 @@
|
||||
const { request } = require('@octokit/request');
const { REPO_OWNER, MAIN_REPO } = require('../../config');

// Octokit request function pre-configured with the auth token and the
// target owner/repo, shared by every helper below.
const requestWithAuth = request.defaults({
  baseUrl: 'https://api.github.com',
  headers: {
    'user-agent': 'pulsar',
    authorization: `token ${process.env.AUTH_TOKEN}`
  },
  owner: REPO_OWNER,
  repo: MAIN_REPO
});

// GitHub REST helpers for the dependency-bump workflow.
// NOTE(review): the label string 'depency ⬆️' is misspelled, but it is used
// consistently by both addLabel and findOpenPRs — renaming it would orphan
// existing PRs, so confirm before changing it.
module.exports = {
  // Opens a pull request for one bump; core packages get a compare-link body.
  createPR: async (
    { moduleName, isCorePackage, latest, installed },
    branch
  ) => {
    let description = `Bumps ${moduleName} from ${installed} to ${latest}`;
    if (isCorePackage) {
      description = `*List of changes between ${moduleName}@${installed} and ${moduleName}@${latest}: https://github.com/pulsar-edit/${moduleName}/compare/v${installed}...v${latest}*`;
    }
    return requestWithAuth('POST /repos/:owner/:repo/pulls', {
      title: `⬆️ ${moduleName}@${latest}`,
      body: description,
      base: 'master',
      head: branch
    });
  },
  // Searches for an existing PR for this module/version on `branch`.
  findPR: async ({ moduleName, latest }, branch) => {
    return requestWithAuth('GET /search/issues', {
      q: `${moduleName} type:pr ${moduleName}@${latest} in:title repo:${REPO_OWNER}/${MAIN_REPO} head:${branch}`
    });
  },
  // Lists open bump PRs, identified by the bump label.
  findOpenPRs: async () => {
    return requestWithAuth('GET /search/issues', {
      q: 'type:pr repo:pulsar-edit/pulsar state:open label:"depency ⬆️"'
    });
  },
  // Combined CI status for a commit/branch ref.
  checkCIstatus: async ({ ref }) => {
    return requestWithAuth('GET /repos/:owner/:repo/commits/:ref/status', {
      ref
    });
  },
  // Merges `ref` into master.
  mergePR: async ({ ref }) => {
    return requestWithAuth('POST /repos/{owner}/{repo}/merges', {
      base: 'master',
      head: ref
    });
  },
  // Tags a PR (as an issue) with the dependency-bump label.
  addLabel: async pullRequestNumber => {
    return requestWithAuth('PATCH /repos/:owner/:repo/issues/:issue_number', {
      labels: ['depency ⬆️'],
      issue_number: pullRequestNumber
    });
  }
};
|
@ -1,27 +0,0 @@
|
||||
const path = require('path');
|
||||
const fetchOutdatedDependencies = require('../fetch-outdated-dependencies');
|
||||
const { nativeDependencies } = require('./helpers');
|
||||
// Dummy fixture repository these specs resolve dependencies from.
const repositoryRootPath = path.resolve('.', 'fixtures', 'dummy');
const packageJSON = require(path.join(repositoryRootPath, 'package.json'));

describe('Fetch outdated dependencies', function() {
  it('should fetch outdated native dependencies', async () => {
    // Stub the npm lookup so the spec stays offline and deterministic.
    spyOn(fetchOutdatedDependencies, 'npm').andReturn(
      Promise.resolve(nativeDependencies)
    );

    const outdated = await fetchOutdatedDependencies.npm(repositoryRootPath);
    expect(outdated).toEqual(nativeDependencies);
  });

  it('should fetch outdated core dependencies', async () => {
    // Stub the apm lookup the same way.
    spyOn(fetchOutdatedDependencies, 'apm').andReturn(
      Promise.resolve(nativeDependencies)
    );

    const outdated = await fetchOutdatedDependencies.apm(packageJSON);
    expect(outdated).toEqual(nativeDependencies);
  });
});
|
@ -1,94 +0,0 @@
|
||||
const path = require('path');
const simpleGit = require('simple-git');
// These specs operate on a real on-disk fixture repository; each `it` block
// performs actual git operations and cleans up after itself, so ordering
// and the `beforeEach` checkout matter.
const repositoryRootPath = path.resolve('.', 'fixtures', 'dummy');
const git = simpleGit(repositoryRootPath);

const {
  switchToCleanBranch,
  makeBranch,
  publishBranch,
  createCommit,
  deleteBranch
} = require('../git')(git, repositoryRootPath);

describe('GIT', () => {
  // Return the first local/remote branch whose name CONTAINS `branch`
  // (substring match, not equality), or undefined if none does.
  async function findBranch(branch) {
    const { branches } = await git.branch();
    return Object.keys(branches).find(_branch => _branch.indexOf(branch) > -1);
  }
  const dependency = {
    moduleName: 'atom',
    latest: '2.0.0'
  };
  // Branch naming convention used by makeBranch: "<module>-<version>".
  const branch = `${dependency.moduleName}-${dependency.latest}`;

  beforeEach(async () => {
    // Start every spec from the fixture's known-good branch.
    await git.checkout('clean-branch');
  });

  it('remotes should include ATOM', async () => {
    const remotes = await git.getRemotes();
    expect(remotes.map(({ name }) => name).includes('ATOM')).toBeTruthy();
  });

  it('current branch should be clean-branch', async () => {
    // Move off clean-branch first so switchToCleanBranch has work to do.
    const testBranchExists = await findBranch('test');
    testBranchExists
      ? await git.checkout('test')
      : await git.checkoutLocalBranch('test');
    expect((await git.branch()).current).toBe('test');
    await switchToCleanBranch();
    expect((await git.branch()).current).toBe('clean-branch');
    await git.deleteLocalBranch('test', true);
  });

  it('should make new branch and checkout to the new branch', async () => {
    const { found, newBranch } = await makeBranch(dependency);
    expect(found).toBe(undefined);
    expect(newBranch).toBe(branch);
    expect((await git.branch()).current).toBe(branch);
    await git.checkout('clean-branch');
    await git.deleteLocalBranch(branch, true);
  });

  it('should find an existing branch and checkout to the branch', async () => {
    await git.checkoutLocalBranch(branch);
    const { found } = await makeBranch(dependency);
    expect(found).not.toBe(undefined);
    expect((await git.branch()).current).toBe(found);
    await git.checkout('clean-branch');
    await git.deleteLocalBranch(branch, true);
  });

  it('should create a commit', async () => {
    const packageJsonFilePath = path.join(repositoryRootPath, 'package.json');
    const packageLockFilePath = path.join(
      repositoryRootPath,
      'package-lock.json'
    );
    // Spy instead of committing for real; only the arguments are asserted.
    spyOn(git, 'commit');
    spyOn(git, 'add');
    await createCommit(dependency);
    expect(git.add).toHaveBeenCalledWith([
      packageJsonFilePath,
      packageLockFilePath
    ]);
    // NOTE(review): the outer `${`...`}` template nesting is redundant —
    // the inner template alone produces the same string.
    expect(git.commit).toHaveBeenCalledWith(
      `${`:arrow_up: ${dependency.moduleName}@${dependency.latest}`}`
    );
  });

  it('should publish branch', async () => {
    spyOn(git, 'push');
    await publishBranch(branch);
    expect(git.push).toHaveBeenCalledWith('ATOM', branch);
  });

  it('should delete an existing branch', async () => {
    await git.checkoutLocalBranch(branch);
    await git.checkout('clean-branch');
    expect(await findBranch(branch)).not.toBe(undefined);
    await deleteBranch(branch);
    expect(await findBranch(branch)).toBe(undefined);
  });
});
|
@ -1,28 +0,0 @@
|
||||
const latestPackageJSON = require('./fixtures/latest-package.json');
|
||||
const packageJSON = require('./fixtures/dummy/package.json');
|
||||
module.exports = {
|
||||
coreDependencies: Object.keys(packageJSON.packageDependencies).map(
|
||||
dependency => {
|
||||
return {
|
||||
latest: latestPackageJSON.packageDependencies[dependency],
|
||||
installed: packageJSON.packageDependencies[dependency],
|
||||
moduleName: dependency,
|
||||
isCorePackage: true
|
||||
};
|
||||
}
|
||||
),
|
||||
nativeDependencies: Object.keys(packageJSON.dependencies)
|
||||
.filter(
|
||||
dependency =>
|
||||
!packageJSON.dependencies[dependency].match(new RegExp('^https?://'))
|
||||
)
|
||||
.map(dependency => {
|
||||
return {
|
||||
latest: latestPackageJSON.dependencies[dependency],
|
||||
packageJson: packageJSON.dependencies[dependency],
|
||||
installed: packageJSON.dependencies[dependency],
|
||||
moduleName: dependency,
|
||||
isCorePackage: false
|
||||
};
|
||||
})
|
||||
};
|
@ -1,53 +0,0 @@
|
||||
const nock = require('nock');
const { createPR, findPR } = require('../pull-request');
const createPrResponse = require('./fixtures/create-pr-response.json');
const searchResponse = require('./fixtures/search-response.json');

describe('Pull Request', () => {
  it('Should create a pull request', async () => {
    // Intercept the exact PR-creation payload we expect createPR to send.
    const expectedBody = {
      title: '⬆️ octocat@2.0.0',
      body: 'Bumps octocat from 1.0.0 to 2.0.0',
      head: 'octocat-2.0.0',
      base: 'master'
    };
    const scope = nock('https://api.github.com')
      .post('/repos/atom/atom/pulls', expectedBody)
      .reply(200, createPrResponse);

    const dependency = {
      moduleName: 'octocat',
      installed: '1.0.0',
      latest: '2.0.0',
      isCorePackage: false
    };
    const response = await createPR(dependency, 'octocat-2.0.0');

    // scope.done() throws if the mocked endpoint was never hit.
    scope.done();
    expect(response.data).toEqual(createPrResponse);
  });

  it('Should search for a pull request', async () => {
    const expectedQuery = {
      q:
        'octocat type:pr octocat@2.0.0 in:title repo:atom/atom head:octocat-2.0.0 state:open',
      owner: 'atom',
      repo: 'atom'
    };
    const scope = nock('https://api.github.com')
      .get('/search/issues')
      .query(expectedQuery)
      .reply(200, searchResponse);

    const response = await findPR(
      { moduleName: 'octocat', installed: '1.0.0', latest: '2.0.0' },
      'octocat-2.0.0'
    );

    scope.done();
    expect(response.data).toEqual(searchResponse);
  });
});
|
@ -1,38 +0,0 @@
|
||||
const path = require('path');
const fs = require('fs');
// This spec mutates the fixture's package.json on disk and restores it at
// the end, so the single `it` block must run to completion in order.
const repositoryRootPath = path.resolve('.', 'fixtures', 'dummy');
const packageJsonFilePath = path.join(repositoryRootPath, 'package.json');
const { updatePackageJson } = require('../util')(repositoryRootPath);
const { coreDependencies, nativeDependencies } = require('./helpers');

describe('Update-dependency', function() {
  // Deep-copy the original file content so it can be restored afterwards.
  const oldPackageJson = JSON.parse(
    JSON.stringify(require(packageJsonFilePath))
  );
  var packageJson;

  it('bumps package.json properly', async function() {
    const dependencies = [...coreDependencies, ...nativeDependencies];
    for (const dependency of dependencies) {
      await updatePackageJson(dependency);
      // Re-read from disk after every bump to verify what was written.
      packageJson = JSON.parse(fs.readFileSync(packageJsonFilePath, 'utf-8'));
      if (dependency.isCorePackage) {
        // Core packages: packageDependencies holds the bare version, while
        // dependencies may hold a URL that merely contains it.
        expect(packageJson.packageDependencies[dependency.moduleName]).toBe(
          dependency.latest
        );
        expect(packageJson.dependencies[dependency.moduleName]).toContain(
          dependency.latest
        );
      } else {
        expect(packageJson.dependencies[dependency.moduleName]).toBe(
          dependency.latest
        );
      }
    }

    // Restore the fixture so other specs see the original content.
    fs.writeFileSync(
      packageJsonFilePath,
      JSON.stringify(oldPackageJson, null, 2)
    );
  });
});
|
@ -1,61 +0,0 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
// Factory: binds the helpers below to the package.json of `repositoryRootPath`.
const util = repositoryRootPath => {
  const packageJsonFilePath = path.join(repositoryRootPath, 'package.json');

  // Escape regex metacharacters so a version string such as "1.2.3" or
  // "^1.2.3" matches only itself when used to build a RegExp. (The previous
  // code left dots unescaped, so "1.2.3" could match e.g. "1x2y3".)
  const escapeForRegExp = text =>
    String(text).replace(/[.*+?^${}()|[\]\\]/g, '\\$&');

  return {
    /**
     * Rewrite package.json so `moduleName` is bumped from `installed` to
     * `latest`.
     *
     * @param {string} moduleName dependency being bumped
     * @param {string} installed currently installed version, e.g. "1.0.0"
     * @param {string} latest target version
     * @param {boolean} [isCorePackage=false] core packages are matched by
     *        `installed` (their ranges are often repo URLs containing it);
     *        native packages by the exact `packageJson` range
     * @param {string} [packageJson=''] the exact range declared in
     *        package.json for native packages, e.g. "^1.0.0"
     * @returns {Promise<void>} resolves once package.json has been written
     */
    updatePackageJson: async function({
      moduleName,
      installed,
      latest,
      isCorePackage = false,
      packageJson = ''
    }) {
      console.log(`Bumping ${moduleName} from ${installed} to ${latest}`);
      // Read the file fresh on every call. The previous implementation
      // cached the parsed JSON via `require` at factory time, so each write
      // silently reverted the bumps made by earlier calls.
      const updatedPackageJson = JSON.parse(
        fs.readFileSync(packageJsonFilePath, 'utf-8')
      );
      if (
        updatedPackageJson.dependencies &&
        updatedPackageJson.dependencies[moduleName]
      ) {
        let searchString = new RegExp(escapeForRegExp(installed));
        if (!isCorePackage) {
          // Native packages: replace the exact declared range (including any
          // leading ^ or ~) with the bare latest version.
          searchString = new RegExp(escapeForRegExp(packageJson));
        }
        updatedPackageJson.dependencies[
          moduleName
        ] = updatedPackageJson.dependencies[moduleName].replace(
          searchString,
          latest
        );
      }
      if (
        updatedPackageJson.packageDependencies &&
        updatedPackageJson.packageDependencies[moduleName]
      ) {
        updatedPackageJson.packageDependencies[
          moduleName
        ] = updatedPackageJson.packageDependencies[moduleName].replace(
          new RegExp(escapeForRegExp(installed)),
          latest
        );
      }
      // fs.promises replaces the hand-rolled `new Promise(fs.writeFile...)`.
      await fs.promises.writeFile(
        packageJsonFilePath,
        JSON.stringify(updatedPackageJson, null, 2)
      );
      console.log(`Bumped ${moduleName} from ${installed} to ${latest}`);
    },
    // Promise-based delay helper (used to respect GitHub abuse rate limits).
    sleep: ms => new Promise(resolve => setTimeout(resolve, ms))
  };
};
|
||||
|
||||
// Export the factory; callers bind it to a repository root, e.g.
// `require('./util')(repositoryRootPath)`.
module.exports = util;
|
@ -1,148 +0,0 @@
|
||||
'use strict';
|
||||
|
||||
const childProcess = require('child_process');
|
||||
const path = require('path');
|
||||
|
||||
// Entry point: throw (failing the build script) when the local machine does
// not meet Pulsar's build requirements. The `ci` argument is accepted for
// call-site compatibility but is currently unused by either check.
module.exports = function(ci) {
  verifyNode();
  verifyPython();
};
|
||||
|
||||
/**
 * Ensure the running Node version can build Pulsar (Node 10.12 or newer).
 * Logs the detected version on success; throws otherwise.
 *
 * @throws {Error} when the running Node is older than 10.12
 */
function verifyNode() {
  const fullVersion = process.versions.node;
  // Compare numeric components explicitly; the old code compared the
  // strings returned by split() and relied on implicit coercion
  // (`'10' === majorVersion`, string >= number).
  const [majorVersion, minorVersion] = fullVersion.split('.').map(Number);
  if (majorVersion >= 11 || (majorVersion === 10 && minorVersion >= 12)) {
    console.log(`Node:\tv${fullVersion}`);
  } else {
    throw new Error(
      `node v10.12+ is required to build Pulsar. node v${fullVersion} is installed.`
    );
  }
}
|
||||
|
||||
// Locate a usable Python (2.6+ or 3.5+) the way node-gyp v5 does, trying a
// sequence of candidate binaries; logs the found version or throws with a
// transcript of everything that was tried.
function verifyPython() {
  // This function essentially re-implements node-gyp's "find-python.js" library,
  // but in a synchronous, bootstrap-script-friendly way.
  // It is based off of the logic of the file from node-gyp v5.x:
  // https://github.com/nodejs/node-gyp/blob/v5.1.1/lib/find-python.js
  // This node-gyp is the version in use by current npm (in mid 2020).
  //
  // TODO: If this repo ships a newer version of node-gyp (v6.x or later), please update this script.
  // (Currently, the build scripts and apm each depend on npm v6.14, which depends on node-gyp v5.)
  // Differences between major versions of node-gyp:
  // node-gyp 5.x looks for python, then python2, then python3.
  // node-gyp 6.x looks for python3, then python, then python2.)
  // node-gyp 5.x accepts Python ^2.6 || >= 3.5, node-gyp 6+ only accepts Python == 2.7 || >= 3.5.
  // node-gyp 7.x stopped using the "-2" flag for "py.exe",
  // so as to allow finding Python 3 as well, not just Python 2.
  // https://github.com/nodejs/node-gyp/blob/master/CHANGELOG.md#v700-2020-06-03

  // Closure state shared by the nested helpers below; verifyBinary mutates
  // these on every candidate it probes.
  let stdout;
  let fullVersion;
  let usablePythonWasFound;
  let triedLog = '';
  let binaryPlusFlag;

  // Probe one candidate `binary`; no-ops once a usable Python was found.
  function verifyBinary(binary, prependFlag) {
    if (binary && !usablePythonWasFound) {
      // clear re-used "result" variables now that we're checking another python binary.
      stdout = '';
      fullVersion = '';

      let allFlags = [
        '-c',
        'import platform\nprint(platform.python_version())'
      ];
      if (prependFlag) {
        // prependFlag is an optional argument,
        // used to prepend "-2" for the "py.exe" launcher.
        //
        // TODO: Refactor this script by eliminating "prependFlag"
        // once we update to node-gyp v7.x or newer;
        // the "-2" flag is not used in node-gyp v7.x.
        allFlags.unshift(prependFlag);
      }

      // Failures (binary missing, non-zero exit) are deliberately swallowed:
      // an unusable candidate just leaves `stdout` empty and we move on.
      try {
        stdout = childProcess.execFileSync(binary, allFlags, {
          env: process.env,
          stdio: ['ignore', 'pipe', 'ignore']
        });
      } catch (e) {}

      // `stdout` starts as a Buffer from execFileSync; the branches below
      // normalize it to a cleaned-up string before parsing the version.
      if (stdout) {
        if (stdout.indexOf('+') !== -1)
          stdout = stdout.toString().replace(/\+/g, '');
        if (stdout.indexOf('rc') !== -1)
          stdout = stdout.toString().replace(/rc(.*)$/gi, '');
        fullVersion = stdout.toString().trim();
      }

      // Accept Python 2.6+ or 3.5+ (node-gyp v5's supported range).
      if (fullVersion) {
        let versionComponents = fullVersion.split('.');
        let majorVersion = Number(versionComponents[0]);
        let minorVersion = Number(versionComponents[1]);
        if (
          (majorVersion === 2 && minorVersion >= 6) ||
          (majorVersion === 3 && minorVersion >= 5)
        ) {
          usablePythonWasFound = true;
        }
      }

      // Prepare to log which commands were tried, and the results, in case no usable Python can be found.
      if (prependFlag) {
        binaryPlusFlag = binary + ' ' + prependFlag;
      } else {
        binaryPlusFlag = binary;
      }
      triedLog = triedLog.concat(
        `log message: tried to check version of "${binaryPlusFlag}", got: "${fullVersion}"\n`
      );
    }
  }

  // If the user explicitly forced a Python via NODE_GYP_FORCE_PYTHON it must
  // be usable — a bad forced value is an error, not a fallthrough.
  function verifyForcedBinary(binary) {
    if (typeof binary !== 'undefined' && binary.length > 0) {
      verifyBinary(binary);
      if (!usablePythonWasFound) {
        throw new Error(
          `NODE_GYP_FORCE_PYTHON is set to: "${binary}", but this is not a valid Python.\n` +
            'Please set NODE_GYP_FORCE_PYTHON to something valid, or unset it entirely.\n' +
            '(Python 2.6, 2.7 or 3.5+ is required to build Pulsar.)\n'
        );
      }
    }
  }

  // These first two checks do nothing if the relevant
  // environment variables aren't set.
  verifyForcedBinary(process.env.NODE_GYP_FORCE_PYTHON);
  // All the following checks will no-op if a previous check has succeeded.
  verifyBinary(process.env.PYTHON);
  verifyBinary('python');
  verifyBinary('python2');
  verifyBinary('python3');
  if (process.platform === 'win32') {
    // Windows: also try the py.exe launcher and the default install paths
    // used by the Python 2.7 / 3.7 installers.
    verifyBinary('py.exe', '-2');
    verifyBinary(
      path.join(process.env.SystemDrive || 'C:', 'Python27', 'python.exe')
    );
    verifyBinary(
      path.join(process.env.SystemDrive || 'C:', 'Python37', 'python.exe')
    );
  }

  if (usablePythonWasFound) {
    console.log(`Python:\tv${fullVersion}`);
  } else {
    // Include the transcript of every candidate tried so the user can see
    // exactly what was probed and what it reported.
    throw new Error(
      `\n${triedLog}\n` +
        'Python 2.6, 2.7 or 3.5+ is required to build Pulsar.\n' +
        'verify-machine-requirements.js was unable to find such a version of Python.\n' +
        "Set the PYTHON env var to e.g. 'C:/path/to/Python27/python.exe'\n" +
        'if your Python is installed in a non-default location.\n'
    );
  }
}
|
Loading…
Reference in New Issue
Block a user