Merge remote-tracking branch 'refs/remotes/origin/master' into wl-update-languages

Wliu 2016-09-01 11:24:39 -04:00
commit 6c95ef0cd8
135 changed files with 2391 additions and 3809 deletions

.gitignore

@@ -10,9 +10,9 @@ npm-debug.log
debug.log
/tags
/atom-shell/
/electron/
/out/
docs/output
docs/includes
spec/fixtures/evil-files/
out/
/electron/

.npmrc

@@ -1,3 +0,0 @@
cache = ~/.atom/.npm
runtime = electron
disturl = https://atom.io/download/atom-shell


@@ -1,20 +1,12 @@
git:
depth: 10
branches:
only:
- master
env:
global:
- ATOM_ACCESS_TOKEN=da809a6077bb1b0aa7c5623f7b2d5f1fec2faae4
compiler: clang
matrix:
include:
- os: linux
env: NODE_VERSION=4
env: NODE_VERSION=4.4.7 CXX=g++-5
sudo: false
@@ -23,14 +15,18 @@ install:
- source /tmp/.nvm/nvm.sh
- nvm install $NODE_VERSION
- nvm use --delete-prefix $NODE_VERSION
- npm install -g npm
- script/build --create-debian-package --create-rpm-package --compress-artifacts
script: script/cibuild
script: true
cache:
directories:
- electron
- node_modules
- apm/node_modules
- build/node_modules
- script/node_modules
- ~/.atom/compile-cache
notifications:
email:
@@ -38,12 +34,21 @@ notifications:
on_failure: change
addons:
artifacts:
paths:
- out/atom-amd64.deb
- out/atom.x86_64.rpm
- out/atom-amd64.tar.gz
target_paths: travis-artifacts/$TRAVIS_BUILD_ID
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- build-essential
- git
- libgnome-keyring-dev
- fakeroot
- gcc-multilib
- gcc-5
- g++-5
- git
- libgnome-keyring-dev
- rpm


@@ -1 +0,0 @@
cache = ~/.atom/.npm


@@ -10,9 +10,6 @@ platform:
environment:
global:
ATOM_DEV_RESOURCE_PATH: c:\projects\atom
ATOM_ACCESS_TOKEN:
secure: Q7vxmSq0bVCLTTRPzXw5ZhPTe7XYhWxX0tQV6neEkddTH6pZkOYNmSCG6VnMX2f+
ATOM_NOISY_BUILD: 1
matrix:
- NODE_VERSION: 4.4.5
@@ -20,23 +17,34 @@ environment:
install:
- SET PATH=C:\Program Files\Atom\resources\cli;%PATH%
- ps: Install-Product node $env:NODE_VERSION $env:PLATFORM
- npm install -g npm
build_script:
- cd %APPVEYOR_BUILD_FOLDER%
- C:\projects\atom\script\cibuild.cmd
- script\build.cmd --code-sign --create-windows-installer --compress-artifacts
test: off
deploy: off
artifacts:
- path: out\**\AtomSetup.exe
- path: out\AtomSetup.exe
name: AtomSetup.exe
- path: out\**\AtomSetup.msi
- path: out\AtomSetup.msi
name: AtomSetup.msi
- path: out\atom-windows.zip
name: atom-windows.zip
- path: out\RELEASES
name: RELEASES
- path: out\atom-*-delta.nupkg
name: atom-delta.nupkg
- path: out\atom-*-full.nupkg
name: atom-full.nupkg
cache:
- '%APPVEYOR_BUILD_FOLDER%\build\node_modules'
- '%APPVEYOR_BUILD_FOLDER%\script\node_modules'
- '%APPVEYOR_BUILD_FOLDER%\apm\node_modules'
- '%APPVEYOR_BUILD_FOLDER%\node_modules'
- '%APPVEYOR_BUILD_FOLDER%\electron'
- '%USERPROFILE%\.atom\.apm'
- '%USERPROFILE%\.atom\.node-gyp\.atom'
- '%USERPROFILE%\.atom\.npm'
- '%USERPROFILE%\.atom\compile-cache'


@@ -55,8 +55,10 @@ fi
if [ $OS == 'Mac' ]; then
if [ -n "$BETA_VERSION" ]; then
ATOM_APP_NAME="Atom Beta.app"
ATOM_EXECUTABLE_NAME="Atom Beta"
else
ATOM_APP_NAME="Atom.app"
ATOM_EXECUTABLE_NAME="Atom"
fi
if [ -z "${ATOM_PATH}" ]; then
@@ -78,7 +80,7 @@ if [ $OS == 'Mac' ]; then
fi
if [ $EXPECT_OUTPUT ]; then
"$ATOM_PATH/$ATOM_APP_NAME/Contents/MacOS/Atom" --executed-from="$(pwd)" --pid=$$ "$@"
"$ATOM_PATH/$ATOM_APP_NAME/Contents/MacOS/$ATOM_EXECUTABLE_NAME" --executed-from="$(pwd)" --pid=$$ "$@"
exit $?
else
open -a "$ATOM_PATH/$ATOM_APP_NAME" -n --args --executed-from="$(pwd)" --pid=$$ --path-environment="$PATH" "$@"


@@ -1 +0,0 @@
cache = ~/.atom/.npm


@@ -1,334 +0,0 @@
fs = require 'fs'
path = require 'path'
os = require 'os'
glob = require 'glob'
usesBabel = require './lib/uses-babel'
babelOptions = require '../static/babelrc'
# Add support for obsolete APIs of the vm module so we can make some third-party
# modules work under node v0.11.x.
require 'vm-compatibility-layer'
_ = require 'underscore-plus'
packageJson = require '../package.json'
module.exports = (grunt) ->
process.env.ATOM_RESOURCE_PATH ?= path.resolve(__dirname, '..')
require('time-grunt')(grunt)
grunt.loadNpmTasks('grunt-babel')
grunt.loadNpmTasks('grunt-coffeelint')
grunt.loadNpmTasks('grunt-lesslint')
grunt.loadNpmTasks('grunt-standard')
grunt.loadNpmTasks('grunt-cson')
grunt.loadNpmTasks('grunt-contrib-csslint')
grunt.loadNpmTasks('grunt-contrib-coffee')
grunt.loadNpmTasks('grunt-contrib-less')
grunt.loadNpmTasks('grunt-shell')
grunt.loadNpmTasks('grunt-download-electron')
grunt.loadNpmTasks('grunt-electron-installer')
grunt.loadNpmTasks('grunt-peg')
grunt.loadTasks('tasks')
# This allows all subsequent paths to be relative to the root of the repo
grunt.file.setBase(path.resolve('..'))
# Options
[defaultChannel, releaseBranch] = getDefaultChannelAndReleaseBranch(packageJson.version)
installDir = grunt.option('install-dir')
buildDir = path.resolve(grunt.option('build-dir') ? 'out')
channel = grunt.option('channel') ? defaultChannel
metadata = packageJson
appName = packageJson.productName
appFileName = packageJson.name
apmFileName = 'apm'
if channel is 'beta'
appName += ' Beta'
appFileName += '-beta'
apmFileName += '-beta'
appName += '.app' if process.platform is 'darwin'
shellAppDir = path.join(buildDir, appName)
symbolsDir = path.join(buildDir, 'Atom.breakpad.syms')
if process.platform is 'win32'
homeDir = process.env.USERPROFILE
contentsDir = shellAppDir
appDir = path.join(shellAppDir, 'resources', 'app')
installDir ?= path.join(process.env.LOCALAPPDATA, appName, 'app-dev')
killCommand = 'taskkill /F /IM atom.exe'
else if process.platform is 'darwin'
homeDir = process.env.HOME
contentsDir = path.join(shellAppDir, 'Contents')
appDir = path.join(contentsDir, 'Resources', 'app')
installDir ?= path.join('/Applications', appName)
killCommand = 'pkill -9 Atom'
else
homeDir = process.env.HOME
contentsDir = shellAppDir
appDir = path.join(shellAppDir, 'resources', 'app')
installDir ?= process.env.INSTALL_PREFIX ? '/usr/local'
killCommand = 'pkill -9 atom'
installDir = path.resolve(installDir)
electronDownloadDir = path.join(homeDir, '.atom', 'electron')
coffeeConfig =
glob_to_multiple:
expand: true
src: [
'src/**/*.coffee'
'spec/*.coffee'
'!spec/*-spec.coffee'
'static/**/*.coffee'
]
dest: appDir
ext: '.js'
babelConfig =
options: babelOptions
dist:
files: []
lessConfig =
options:
paths: [
'static/variables'
'static'
]
glob_to_multiple:
expand: true
src: [
'static/**/*.less'
]
dest: appDir
ext: '.css'
prebuildLessConfig =
options:
cachePath: path.join(homeDir, '.atom', 'compile-cache', 'prebuild-less', require('less-cache/package.json').version)
src: [
'static/**/*.less'
]
csonConfig =
options:
rootObject: true
cachePath: path.join(homeDir, '.atom', 'compile-cache', 'grunt-cson')
glob_to_multiple:
expand: true
src: [
'menus/*.cson'
'keymaps/*.cson'
'static/**/*.cson'
]
dest: appDir
ext: '.json'
pegConfig =
glob_to_multiple:
expand: true
src: ['src/**/*.pegjs']
dest: appDir
ext: '.js'
for jsFile in glob.sync("src/**/*.js")
if usesBabel(jsFile)
babelConfig.dist.files.push({
src: [jsFile]
dest: path.join(appDir, jsFile)
})
for jsFile in glob.sync("exports/**/*.js")
if usesBabel(jsFile)
babelConfig.dist.files.push({
src: [jsFile]
dest: path.join(appDir, jsFile)
})
for child in fs.readdirSync('node_modules') when child isnt '.bin'
directory = path.join('node_modules', child)
metadataPath = path.join(directory, 'package.json')
continue unless grunt.file.isFile(metadataPath)
{engines, theme} = grunt.file.readJSON(metadataPath)
if engines?.atom?
coffeeConfig.glob_to_multiple.src.push("#{directory}/**/*.coffee")
coffeeConfig.glob_to_multiple.src.push("!#{directory}/spec/**/*.coffee")
lessConfig.glob_to_multiple.src.push("#{directory}/**/*.less")
lessConfig.glob_to_multiple.src.push("!#{directory}/spec/**/*.less")
unless theme
prebuildLessConfig.src.push("#{directory}/**/*.less")
prebuildLessConfig.src.push("!#{directory}/spec/**/*.less")
csonConfig.glob_to_multiple.src.push("#{directory}/**/*.cson")
csonConfig.glob_to_multiple.src.push("!#{directory}/spec/**/*.cson")
pegConfig.glob_to_multiple.src.push("#{directory}/lib/*.pegjs")
for jsFile in glob.sync("#{directory}/lib/**/*.js")
if usesBabel(jsFile)
babelConfig.dist.files.push({
src: [jsFile]
dest: path.join(appDir, jsFile)
})
windowsInstallerConfig =
grunt.initConfig
pkg: grunt.file.readJSON('package.json')
atom: {
appName, channel, metadata, releaseBranch,
appFileName, apmFileName,
appDir, buildDir, contentsDir, installDir, shellAppDir, symbolsDir,
}
docsOutputDir: 'docs/output'
babel: babelConfig
coffee: coffeeConfig
less: lessConfig
'prebuild-less': prebuildLessConfig
cson: csonConfig
peg: pegConfig
coffeelint:
options:
configFile: 'coffeelint.json'
src: [
'dot-atom/**/*.coffee'
'src/**/*.coffee'
]
build: [
'build/tasks/**/*.coffee'
'build/Gruntfile.coffee'
]
test: [
'spec/*.coffee'
]
standard:
src: [
'exports/**/*.js'
'src/**/*.js'
'static/*.js'
]
csslint:
options:
'adjoining-classes': false
'duplicate-background-images': false
'box-model': false
'box-sizing': false
'bulletproof-font-face': false
'compatible-vendor-prefixes': false
'display-property-grouping': false
'fallback-colors': false
'font-sizes': false
'gradients': false
'ids': false
'important': false
'known-properties': false
'outline-none': false
'overqualified-elements': false
'qualified-headings': false
'unique-headings': false
'universal-selector': false
'vendor-prefix': false
src: [
'static/**/*.css'
]
lesslint:
src: [
'static/**/*.less'
]
'download-electron':
version: packageJson.electronVersion
outputDir: 'electron'
downloadDir: electronDownloadDir
rebuild: true # rebuild native modules after electron is updated
token: process.env.ATOM_ACCESS_TOKEN ? 'da809a6077bb1b0aa7c5623f7b2d5f1fec2faae4'
'create-windows-installer':
installer:
appDirectory: shellAppDir
outputDirectory: path.join(buildDir, 'installer')
authors: 'GitHub Inc.'
loadingGif: path.resolve(__dirname, '..', 'resources', 'win', 'loading.gif')
iconUrl: "https://raw.githubusercontent.com/atom/atom/master/resources/app-icons/#{channel}/atom.ico"
setupIcon: path.resolve(__dirname, '..', 'resources', 'app-icons', channel, 'atom.ico')
remoteReleases: "https://atom.io/api/updates?version=#{metadata.version}"
shell:
'kill-atom':
command: killCommand
options:
stdout: false
stderr: false
failOnError: false
grunt.registerTask('compile', ['babel', 'coffee', 'prebuild-less', 'cson', 'peg'])
grunt.registerTask('lint', ['standard', 'coffeelint', 'csslint', 'lesslint'])
grunt.registerTask('test', ['shell:kill-atom', 'run-specs'])
ciTasks = []
ciTasks.push('output-disk-space') unless process.env.CI
ciTasks.push('download-electron')
ciTasks.push('download-electron-chromedriver')
ciTasks.push('build')
ciTasks.push('fingerprint')
ciTasks.push('dump-symbols') if process.platform is 'darwin'
ciTasks.push('set-version', 'check-licenses', 'lint', 'generate-asar')
ciTasks.push('mkdeb') if process.platform is 'linux'
ciTasks.push('mktar') if process.platform is 'linux'
ciTasks.push('test') if process.platform is 'darwin'
ciTasks.push('codesign:app') if process.platform is 'darwin' and not process.env.CI
if process.platform is 'win32'
ciTasks.push('codesign:exe') if process.env.JANKY_SIGNTOOL
ciTasks.push('codesign:installer-deferred') if not process.env.JANKY_SIGNTOOL
ciTasks.push('create-windows-installer:installer')
ciTasks.push('codesign:installer') if process.env.JANKY_SIGNTOOL
ciTasks.push('codesign:cleanup')
if process.env.ATOM_PUBLISH_REPO or not process.env.CI
ciTasks.push('publish-build')
grunt.registerTask('ci', ciTasks)
defaultTasks = ['download-electron', 'download-electron-chromedriver', 'build', 'set-version', 'generate-asar']
unless process.platform is 'linux' or grunt.option('no-install')
defaultTasks.push 'install'
grunt.registerTask('default', defaultTasks)
grunt.registerTask('build-and-sign', ['download-electron', 'download-electron-chromedriver', 'build', 'set-version', 'generate-asar', 'codesign:app', 'install'])
getDefaultChannelAndReleaseBranch = (version) ->
if version.match(/dev/) or isBuildingPR()
channel = 'dev'
releaseBranch = null
else
if version.match(/beta/)
channel = 'beta'
else
channel = 'stable'
minorVersion = version.match(/^\d+\.\d+/)[0]
releaseBranch = "#{minorVersion}-releases"
[channel, releaseBranch]
isBuildingPR = ->
process.env.APPVEYOR_PULL_REQUEST_NUMBER? or process.env.TRAVIS_PULL_REQUEST?
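A minimal standalone sketch (illustration only, not part of this commit) of what the channel selection above boils down to, ignoring the release-branch and pull-request handling; the sample version strings are made up:

channelFor = (version) ->
  return 'dev' if /dev/.test(version)
  if /beta/.test(version) then 'beta' else 'stable'

# Made-up examples: prints dev, beta, stable
console.log "#{v} -> #{channelFor(v)}" for v in ['1.12.0-dev-ab3c4d5', '1.10.0-beta5', '1.9.8']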


@@ -1,10 +0,0 @@
# Atom Build
This folder contains the grunt configuration and tasks to build Atom.
It was moved from the root of the repository so that any native modules used
would be compiled against Node's V8 headers, since anything stored in
`node_modules` at the root of the repo is compiled against Atom's V8 headers.
New build dependencies should be added to the `package.json` file located in
this folder.

Binary file not shown.


@@ -1,50 +0,0 @@
# VERSION: 0.1
# DESCRIPTION: Create the atom editor in a container
# AUTHOR: Jessica Frazelle <jessie@docker.com>
# COMMENTS:
# This file describes how to build the atom editor
# in a container with all dependencies installed.
# Tested on Debian Jessie.
# USAGE:
# # Download atom Dockerfile
# wget https://raw.githubusercontent.com/atom/atom/master/Dockerfile
#
# # Build atom image
# docker build -t atom .
#
# docker run -v /tmp/.X11-unix:/tmp/.X11-unix \
# -e DISPLAY=unix$DISPLAY atom
#
# DOCKER-VERSION 1.3
# Base docker image
FROM debian:jessie
MAINTAINER Jessica Frazelle <jessie@docker.com>
# Install dependencies
RUN apt-get update && apt-get install -y \
build-essential \
ca-certificates \
curl \
git \
libasound2 \
libgconf-2-4 \
libgnome-keyring-dev \
libgtk2.0-0 \
libnss3 \
libxtst6 \
--no-install-recommends
# install node
RUN curl -sL https://deb.nodesource.com/setup | bash -
RUN apt-get install -y nodejs
# clone atom
RUN git clone https://github.com/atom/atom /src
WORKDIR /src
RUN git fetch && git checkout $(git describe --tags `git rev-list --tags --max-count=1`)
RUN script/build && script/grunt install
# Autorun atom
CMD /usr/local/bin/atom --foreground --log-file /var/log/atom.log && tail -f /var/log/atom.log


@@ -1,19 +0,0 @@
fs = require 'fs'
BABEL_PREFIXES = [
"'use babel'"
'"use babel"'
'/** @babel */'
'/* @flow */'
]
PREFIX_LENGTH = Math.max(BABEL_PREFIXES.map((prefix) -> prefix.length)...)
buffer = Buffer(PREFIX_LENGTH)
module.exports = (filename) ->
file = fs.openSync(filename, 'r')
fs.readSync(file, buffer, 0, PREFIX_LENGTH)
fs.closeSync(file)
BABEL_PREFIXES.some (prefix) ->
prefix is buffer.toString('utf8', 0, prefix.length)
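A hypothetical usage sketch of the helper above (not part of the diff; it assumes it runs from the build/ directory, where lib/uses-babel and the glob dependency live): list only the files that opt in to transpilation via one of the recognized pragmas.

glob = require 'glob'
usesBabel = require './lib/uses-babel'

# Print each source file whose first bytes contain a Babel/Flow pragma
console.log "would transpile #{jsFile}" for jsFile in glob.sync('src/**/*.js') when usesBabel(jsFile)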


@@ -1,50 +0,0 @@
{
"name": "atom-build",
"description": "Atom build",
"repository": {
"type": "git",
"url": "https://github.com/atom/atom.git"
},
"dependencies": {
"asar": "^0.8.0",
"async": "~0.2.9",
"aws-sdk": "^2.2.18",
"babel-eslint": "^5.0.0-beta6",
"donna": "^1.0.13",
"escope": "~3.3.0",
"formidable": "~1.0.14",
"fs-plus": "2.x",
"github-releases": "~0.3.1",
"glob": "^5.0.14",
"grunt": "~0.4.1",
"grunt-babel": "^5.0.1",
"grunt-cli": "~0.1.9",
"grunt-coffeelint": "git+https://github.com/atom/grunt-coffeelint.git#cfb99aa99811d52687969532bd5a98011ed95bfe",
"grunt-contrib-coffee": "~0.12.0",
"grunt-contrib-csslint": "~0.2.0",
"grunt-contrib-less": "~0.8.0",
"grunt-cson": "0.16.0",
"grunt-download-electron": "^2.1.1",
"grunt-electron-installer": "1.2.2",
"grunt-lesslint": "0.17.0",
"grunt-peg": "~1.1.0",
"grunt-shell": "~0.3.1",
"grunt-standard": "^2.0.0",
"joanna": "0.0.6",
"legal-eagle": "~0.13.0",
"minidump": "~0.9",
"npm": "3.10.5",
"rcedit": "~0.3.0",
"request": "~2.27.0",
"rimraf": "~2.2.2",
"runas": "^3.1",
"standard": "^5.4.1",
"tello": "1.0.5",
"temp": "~0.8.1",
"time-grunt": "1.2.2",
"underscore-plus": "1.x",
"unzip": "~0.1.9",
"vm-compatibility-layer": "~0.1.0",
"webdriverio": "^2.4.5"
}
}


@@ -1,188 +0,0 @@
fs = require 'fs'
path = require 'path'
_ = require 'underscore-plus'
module.exports = (grunt) ->
{cp, isAtomPackage, mkdir, rm} = require('./task-helpers')(grunt)
grunt.registerTask 'build', 'Build the application', ->
shellAppDir = grunt.config.get('atom.shellAppDir')
buildDir = grunt.config.get('atom.buildDir')
appDir = grunt.config.get('atom.appDir')
rm shellAppDir
rm path.join(buildDir, 'installer')
mkdir path.dirname(buildDir)
if process.platform is 'darwin'
cp 'electron/Electron.app', shellAppDir, filter: /default_app/
fs.renameSync path.join(shellAppDir, 'Contents', 'MacOS', 'Electron'), path.join(shellAppDir, 'Contents', 'MacOS', 'Atom')
fs.renameSync path.join(shellAppDir, 'Contents', 'Frameworks', 'Electron Helper.app'), path.join(shellAppDir, 'Contents', 'Frameworks', 'Atom Helper.app')
fs.renameSync path.join(shellAppDir, 'Contents', 'Frameworks', 'Atom Helper.app', 'Contents', 'MacOS', 'Electron Helper'), path.join(shellAppDir, 'Contents', 'Frameworks', 'Atom Helper.app', 'Contents', 'MacOS', 'Atom Helper')
else
cp 'electron', shellAppDir, filter: /default_app/
if process.platform is 'win32'
fs.renameSync path.join(shellAppDir, 'electron.exe'), path.join(shellAppDir, 'atom.exe')
else
fs.renameSync path.join(shellAppDir, 'electron'), path.join(shellAppDir, 'atom')
mkdir appDir
if process.platform isnt 'win32'
cp 'atom.sh', path.resolve(appDir, '..', 'new-app', 'atom.sh')
cp 'package.json', path.join(appDir, 'package.json')
packageNames = []
packageDirectories = []
nonPackageDirectories = [
'dot-atom'
'vendor'
]
{devDependencies} = grunt.file.readJSON('package.json')
for child in fs.readdirSync('node_modules')
directory = path.join('node_modules', child)
if isAtomPackage(directory)
packageDirectories.push(directory)
packageNames.push(child)
else
nonPackageDirectories.push(directory)
# Put any paths here that shouldn't end up in the built Atom.app
# so that it doesn't become larger than it needs to be.
ignoredPaths = [
path.join('git-utils', 'deps')
path.join('ohnogit', 'node_modules', 'nodegit', 'vendor')
path.join('ohnogit', 'node_modules', 'nodegit', 'node_modules', 'node-pre-gyp')
path.join('ohnogit', 'node_modules', 'nodegit', 'node_modules', '.bin')
path.join('oniguruma', 'deps')
path.join('less', 'dist')
path.join('npm', 'doc')
path.join('npm', 'html')
path.join('npm', 'man')
path.join('npm', 'node_modules', '.bin', 'beep')
path.join('npm', 'node_modules', '.bin', 'clear')
path.join('npm', 'node_modules', '.bin', 'starwars')
path.join('pegjs', 'examples')
path.join('get-parameter-names', 'node_modules', 'testla')
path.join('get-parameter-names', 'node_modules', '.bin', 'testla')
path.join('jasmine-reporters', 'ext')
path.join('jasmine-node', 'node_modules', 'gaze')
path.join('jasmine-node', 'spec')
path.join('node_modules', 'nan')
path.join('node_modules', 'native-mate')
path.join('build', 'binding.Makefile')
path.join('build', 'config.gypi')
path.join('build', 'gyp-mac-tool')
path.join('build', 'Makefile')
path.join('build', 'Release', 'obj.target')
path.join('build', 'Release', 'obj')
path.join('build', 'Release', '.deps')
path.join('vendor', 'apm')
# These are only required in dev mode when the grammar isn't precompiled
path.join('snippets', 'node_modules', 'loophole')
path.join('snippets', 'node_modules', 'pegjs')
path.join('snippets', 'node_modules', '.bin', 'pegjs')
'.DS_Store'
'.jshintrc'
'.npmignore'
'.pairs'
'.travis.yml'
'appveyor.yml'
'.idea'
'.editorconfig'
'.lint'
'.lintignore'
'.eslintrc'
'.jshintignore'
'coffeelint.json'
'.coffeelintignore'
'.gitattributes'
'.gitkeep'
]
packageNames.forEach (packageName) -> ignoredPaths.push(path.join(packageName, 'spec'))
ignoredPaths = ignoredPaths.map (ignoredPath) -> _.escapeRegExp(ignoredPath)
# Add .* to avoid matching hunspell_dictionaries.
ignoredPaths.push "#{_.escapeRegExp(path.join('spellchecker', 'vendor', 'hunspell') + path.sep)}.*"
ignoredPaths.push "#{_.escapeRegExp(path.join('build', 'Release') + path.sep)}.*\\.pdb"
# Ignore *.cc and *.h files from native modules
ignoredPaths.push "#{_.escapeRegExp(path.join('ctags', 'src') + path.sep)}.*\\.(cc|h)*"
ignoredPaths.push "#{_.escapeRegExp(path.join('git-utils', 'src') + path.sep)}.*\\.(cc|h)*"
ignoredPaths.push "#{_.escapeRegExp(path.join('ohnogit', 'node_modules', 'nodegit', 'src') + path.sep)}.*\\.(cc|h)?"
ignoredPaths.push "#{_.escapeRegExp(path.join('ohnogit', 'node_modules', 'nodegit', 'generate') + path.sep)}.*\\.(cc|h)?"
ignoredPaths.push "#{_.escapeRegExp(path.join('ohnogit', 'node_modules', 'nodegit', 'include') + path.sep)}.*\\.(cc|h)?"
ignoredPaths.push "#{_.escapeRegExp(path.join('keytar', 'src') + path.sep)}.*\\.(cc|h)*"
ignoredPaths.push "#{_.escapeRegExp(path.join('nslog', 'src') + path.sep)}.*\\.(cc|h)*"
ignoredPaths.push "#{_.escapeRegExp(path.join('oniguruma', 'src') + path.sep)}.*\\.(cc|h)*"
ignoredPaths.push "#{_.escapeRegExp(path.join('pathwatcher', 'src') + path.sep)}.*\\.(cc|h)*"
ignoredPaths.push "#{_.escapeRegExp(path.join('runas', 'src') + path.sep)}.*\\.(cc|h)*"
ignoredPaths.push "#{_.escapeRegExp(path.join('scrollbar-style', 'src') + path.sep)}.*\\.(cc|h)*"
ignoredPaths.push "#{_.escapeRegExp(path.join('spellchecker', 'src') + path.sep)}.*\\.(cc|h)*"
ignoredPaths.push "#{_.escapeRegExp(path.join('cached-run-in-this-context', 'src') + path.sep)}.*\\.(cc|h)?"
ignoredPaths.push "#{_.escapeRegExp(path.join('keyboard-layout', 'src') + path.sep)}.*\\.(cc|h|mm)*"
# Ignore build files
ignoredPaths.push "#{_.escapeRegExp(path.sep)}binding\\.gyp$"
ignoredPaths.push "#{_.escapeRegExp(path.sep)}.+\\.target.mk$"
ignoredPaths.push "#{_.escapeRegExp(path.sep)}linker\\.lock$"
ignoredPaths.push "#{_.escapeRegExp(path.join('build', 'Release') + path.sep)}.+\\.node\\.dSYM"
# Hunspell dictionaries are not needed on macOS.
if process.platform is 'darwin'
ignoredPaths.push path.join('spellchecker', 'vendor', 'hunspell_dictionaries')
ignoredPaths = ignoredPaths.map (ignoredPath) -> "(#{ignoredPath})"
testFolderPattern = new RegExp("#{_.escapeRegExp(path.sep)}_*te?sts?_*#{_.escapeRegExp(path.sep)}")
exampleFolderPattern = new RegExp("#{_.escapeRegExp(path.sep)}examples?#{_.escapeRegExp(path.sep)}")
nodeModulesFilter = new RegExp(ignoredPaths.join('|'))
filterNodeModule = (pathToCopy) ->
pathToCopy = path.resolve(pathToCopy)
nodeModulesFilter.test(pathToCopy) or testFolderPattern.test(pathToCopy) or exampleFolderPattern.test(pathToCopy)
packageFilter = new RegExp("(#{ignoredPaths.join('|')})|(.+\\.(cson|coffee)$)")
filterPackage = (pathToCopy) ->
pathToCopy = path.resolve(pathToCopy)
packageFilter.test(pathToCopy) or testFolderPattern.test(pathToCopy) or exampleFolderPattern.test(pathToCopy)
for directory in nonPackageDirectories
cp directory, path.join(appDir, directory), filter: filterNodeModule
for directory in packageDirectories
cp directory, path.join(appDir, directory), filter: filterPackage
cp 'src', path.join(appDir, 'src'), filter: /.+\.(cson|coffee)$/
cp 'static', path.join(appDir, 'static')
cp path.join('apm', 'node_modules', 'atom-package-manager'), path.resolve(appDir, '..', 'new-app', 'apm'), filter: filterNodeModule
if process.platform isnt 'win32'
fs.symlinkSync(path.join('..', '..', 'bin', 'apm'), path.resolve(appDir, '..', 'new-app', 'apm', 'node_modules', '.bin', 'apm'))
channel = grunt.config.get('atom.channel')
cp path.join('resources', 'app-icons', channel, 'png', '1024.png'), path.join(appDir, 'resources', 'atom.png')
if process.platform is 'darwin'
cp path.join('resources', 'app-icons', channel, 'atom.icns'), path.resolve(appDir, '..', 'atom.icns')
cp path.join('resources', 'mac', 'file.icns'), path.resolve(appDir, '..', 'file.icns')
cp path.join('resources', 'mac', 'speakeasy.pem'), path.resolve(appDir, '..', 'speakeasy.pem')
if process.platform is 'win32'
[ 'atom.cmd', 'atom.sh', 'atom.js', 'apm.cmd', 'apm.sh', 'file.ico' ]
.forEach (file) -> cp path.join('resources', 'win', file), path.join(shellAppDir, 'resources', 'cli', file)
if process.platform is 'linux'
cp path.join('resources', 'app-icons', channel, 'png'), path.join(buildDir, 'icons')
dependencies = ['compile', 'generate-license:save', 'generate-module-cache', 'compile-packages-slug']
dependencies.push('copy-info-plist') if process.platform is 'darwin'
dependencies.push('set-exe-icon') if process.platform is 'win32'
grunt.task.run(dependencies...)


@@ -1,27 +0,0 @@
module.exports = (grunt) ->
grunt.registerTask 'check-licenses', 'Report the licenses of all dependencies', ->
legalEagle = require 'legal-eagle'
{size, keys} = require 'underscore-plus'
done = @async()
options =
path: process.cwd()
omitPermissive: true
overrides: require './license-overrides'
legalEagle options, (err, summary) ->
if err?
console.error(err)
process.exit 1
for key of summary
delete summary[key] if key.match /^atom@/
if size(summary)
console.error "Found dependencies without permissive licenses:"
for name in keys(summary).sort()
console.error "#{name}"
console.error " License: #{summary[name].license}"
console.error " Repository: #{summary[name].repository}"
process.exit 1
done()


@@ -1,22 +0,0 @@
path = require 'path'
os = require 'os'
module.exports = (grunt) ->
{rm} = require('./task-helpers')(grunt)
grunt.registerTask 'partial-clean', 'Delete some of the build files', ->
tmpdir = os.tmpdir()
rm grunt.config.get('atom.buildDir')
rm require('../src/coffee-cache').cacheDir
rm require('../src/less-compile-cache').cacheDir
rm path.join(tmpdir, 'atom-cached-atom-shells')
rm 'atom-shell'
rm 'electron'
grunt.registerTask 'clean', 'Delete all the build files', ->
homeDir = process.env[if process.platform is 'win32' then 'USERPROFILE' else 'HOME']
rm 'node_modules'
rm path.join(homeDir, '.atom', '.node-gyp')
grunt.task.run('partial-clean')


@@ -1,89 +0,0 @@
path = require 'path'
fs = require 'fs'
request = require 'request'
module.exports = (grunt) ->
{spawn} = require('./task-helpers')(grunt)
# macOS code signing
grunt.registerTask 'codesign:app', 'CodeSign Atom.app', ->
done = @async()
unlockKeychain (error) ->
return done(error) if error?
args = ['--deep', '--force', '--verbose', '--sign', 'Developer ID Application: GitHub', grunt.config.get('atom.shellAppDir')]
spawn {cmd: 'codesign', args: args}, (error) -> done(error)
unlockKeychain = (callback) ->
return callback() unless process.env.XCODE_KEYCHAIN
{XCODE_KEYCHAIN_PASSWORD, XCODE_KEYCHAIN} = process.env
args = ['unlock-keychain', '-p', XCODE_KEYCHAIN_PASSWORD, XCODE_KEYCHAIN]
spawn {cmd: 'security', args: args}, (error) -> callback(error)
# Windows code signing
grunt.registerTask 'codesign:exe', 'CodeSign Windows binaries', ->
done = @async()
atomExePath = path.join(grunt.config.get('atom.shellAppDir'), 'atom.exe')
signWindowsExecutable atomExePath, (error) ->
return done(error) if error?
updateExePath = path.resolve(__dirname, '..', 'node_modules', 'grunt-electron-installer', 'vendor', 'Update.exe')
signWindowsExecutable updateExePath, (error) -> done(error)
grunt.registerTask 'codesign:installer', 'CodeSign Windows installer (AtomSetup.exe)', ->
done = @async()
atomSetupExePath = path.resolve(grunt.config.get('atom.buildDir'), 'installer', 'AtomSetup.exe')
signWindowsExecutable atomSetupExePath, (error) -> done(error)
grunt.registerTask 'codesign:installer-deferred', 'Obtain cert and configure installer to perform CodeSign', ->
done = @async()
getCertificate (file, password) ->
grunt.config('create-windows-installer.installer.certificateFile', file)
grunt.config('create-windows-installer.installer.certificatePassword', password)
grunt.log.ok('Certificate ready for create-windows-installer task')
done()
grunt.registerTask 'codesign:cleanup', 'Clean up any temporary or downloaded files used for CodeSign', ->
try fs.unlinkSync(downloadedCertificateFile) catch e then return
downloadedCertificateFile = path.resolve(__dirname, 'DownloadedCertFile.p12')
signWindowsExecutable = (exeToSign, callback) ->
if process.env.JANKY_SIGNTOOL
signUsingJanky exeToSign, callback
else
signUsingWindowsSDK exeToSign, callback
signUsingJanky = (exeToSign, callback) ->
grunt.log.ok("Signing #{exeToSign} using Janky SignTool")
spawn {cmd: process.env.JANKY_SIGNTOOL, args: [exeToSign]}, callback
signUsingWindowsSDK = (exeToSign, callback) ->
getCertificate (file, password) ->
signUsingWindowsSDKTool exeToSign, file, password, callback
signUsingWindowsSDKTool = (exeToSign, certificateFile, certificatePassword, callback) ->
grunt.log.ok("Signing '#{exeToSign}' using Windows SDK")
args = ['sign', '/v', '/p', certificatePassword, '/f', certificateFile, exeToSign]
spawn {cmd: 'C:\\Program Files (x86)\\Microsoft SDKs\\Windows\\v7.1A\\bin\\signtool.exe', args: args}, callback
getCertificate = (callback) ->
if process.env.WIN_P12KEY_URL?
grunt.log.ok("Obtaining certificate file")
downloadFile process.env.WIN_P12KEY_URL, downloadedCertificateFile, (done) ->
callback(downloadedCertificateFile, process.env.WIN_P12KEY_PASSWORD ? 'password')
else
callback(path.resolve(__dirname, '..', 'certs', 'AtomDevTestSignKey.p12'), process.env.WIN_P12KEY_PASSWORD ? 'password')
downloadFile = (sourceUrl, targetPath, callback) ->
options = {
url: sourceUrl
headers: {
'User-Agent': 'Atom Signing Key build task',
'Accept': 'application/vnd.github.VERSION.raw'
}
}
request(options)
.pipe(fs.createWriteStream(targetPath))
.on('finish', callback)


@@ -1,93 +0,0 @@
path = require 'path'
CSON = require 'season'
fs = require 'fs-plus'
_ = require 'underscore-plus'
normalizePackageData = require 'normalize-package-data'
semver = require 'semver'
OtherPlatforms = ['darwin', 'freebsd', 'linux', 'sunos', 'win32'].filter (platform) -> platform isnt process.platform
module.exports = (grunt) ->
{spawn} = require('./task-helpers')(grunt)
getMenu = (appDir) ->
menusPath = path.join(appDir, 'menus')
menuPath = path.join(menusPath, "#{process.platform}.json")
menu = CSON.readFileSync(menuPath) if fs.isFileSync(menuPath)
menu
getKeymaps = (appDir) ->
keymapsPath = path.join(appDir, 'keymaps')
keymaps = {}
for keymapPath in fs.listSync(keymapsPath, ['.json'])
name = path.basename(keymapPath, path.extname(keymapPath))
continue unless OtherPlatforms.indexOf(name) is -1
keymap = CSON.readFileSync(keymapPath)
keymaps[path.basename(keymapPath)] = keymap
keymaps
grunt.registerTask 'compile-packages-slug', 'Add bundled package metadata information to the main package.json file', ->
appDir = fs.realpathSync(grunt.config.get('atom.appDir'))
modulesDirectory = path.join(appDir, 'node_modules')
packages = {}
invalidPackages = false
for moduleDirectory in fs.listSync(modulesDirectory)
continue if path.basename(moduleDirectory) is '.bin'
metadataPath = path.join(moduleDirectory, 'package.json')
continue unless fs.existsSync(metadataPath)
metadata = grunt.file.readJSON(metadataPath)
continue unless metadata?.engines?.atom?
reportPackageError = (msg) ->
invalidPackages = true
grunt.log.error("#{metadata.name}: #{msg}")
normalizePackageData metadata, reportPackageError, true
if metadata.repository?.type is 'git'
metadata.repository.url = metadata.repository.url?.replace(/^git\+/, '')
moduleCache = metadata._atomModuleCache ? {}
_.remove(moduleCache.extensions?['.json'] ? [], 'package.json')
for property in ['_from', '_id', 'dist', 'readme', 'readmeFilename']
delete metadata[property]
pack = {metadata, keymaps: {}, menus: {}}
if metadata.main
mainPath = require.resolve(path.resolve(moduleDirectory, metadata.main))
pack.main = path.relative(appDir, mainPath)
keymapsPath = path.join(moduleDirectory, 'keymaps')
for keymapPath in fs.listSync(keymapsPath, ['.cson', '.json'])
relativePath = path.relative(appDir, keymapPath)
pack.keymaps[relativePath] = CSON.readFileSync(keymapPath)
menusPath = path.join(moduleDirectory, 'menus')
for menuPath in fs.listSync(menusPath, ['.cson', '.json'])
relativePath = path.relative(appDir, menuPath)
pack.menus[relativePath] = CSON.readFileSync(menuPath)
packages[metadata.name] = pack
for extension, paths of moduleCache.extensions
delete moduleCache.extensions[extension] if paths.length is 0
metadata = grunt.file.readJSON(path.join(appDir, 'package.json'))
metadata._atomPackages = packages
metadata._atomMenu = getMenu(appDir)
metadata._atomKeymaps = getKeymaps(appDir)
metadata._deprecatedPackages = require('../deprecated-packages')
for name, {version} of metadata._deprecatedPackages
if version and not semver.validRange(version)
invalidPackages = true
grunt.log.error("Invalid range: #{version} (#{name})")
grunt.file.write(path.join(appDir, 'package.json'), JSON.stringify(metadata))
not invalidPackages
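For illustration only (an assumed shape, not content taken from the commit), the slug this task writes nests each bundled package under _atomPackages in the app's package.json, roughly like this for a hypothetical package:

# Hypothetical example of the structure produced by the task above
atomPackagesSlug =
  'example-package':
    main: 'node_modules/example-package/lib/main.js'
    metadata: {name: 'example-package', version: '1.0.0', engines: {atom: '>=1.0.0'}}
    keymaps: {'node_modules/example-package/keymaps/example-package.cson': {}}
    menus: {}

console.log JSON.stringify({_atomPackages: atomPackagesSlug}, null, 2)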


@@ -1,13 +0,0 @@
path = require 'path'
module.exports = (grunt) ->
{cp} = require('./task-helpers')(grunt)
grunt.registerTask 'copy-info-plist', 'Copy plist', ->
contentsDir = grunt.config.get('atom.contentsDir')
plistPath = path.join(contentsDir, 'Info.plist')
helperPlistPath = path.join(contentsDir, 'Frameworks/Atom Helper.app/Contents/Info.plist')
# Copy custom plist files
cp 'resources/mac/atom-Info.plist', plistPath
cp 'resources/mac/helper-Info.plist', helperPlistPath


@@ -1,50 +0,0 @@
path = require 'path'
fs = require 'fs-plus'
_ = require 'underscore-plus'
donna = require 'donna'
joanna = require 'joanna'
tello = require 'tello'
glob = require 'glob'
module.exports = (grunt) ->
getClassesToInclude = ->
modulesPath = path.resolve(__dirname, '..', '..', 'node_modules')
classes = {}
fs.traverseTreeSync modulesPath, (modulePath) ->
return false if modulePath.match(/node_modules/g).length > 1 # don't need the dependencies of the dependencies
return true unless path.basename(modulePath) is 'package.json'
return true unless fs.isFileSync(modulePath)
apiPath = path.join(path.dirname(modulePath), 'api.json')
if fs.isFileSync(apiPath)
_.extend(classes, grunt.file.readJSON(apiPath).classes)
true
classes
sortClasses = (classes) ->
sortedClasses = {}
for className in Object.keys(classes).sort()
sortedClasses[className] = classes[className]
sortedClasses
grunt.registerTask 'build-docs', 'Builds the API docs in src', ->
docsOutputDir = grunt.config.get('docsOutputDir')
[coffeeMetadata] = donna.generateMetadata(['.'])
jsMetadata = joanna(glob.sync('src/*.js'))
metadata = {
repository: coffeeMetadata.repository,
version: coffeeMetadata.version,
files: Object.assign(coffeeMetadata.files, jsMetadata.files)
}
api = tello.digest([metadata])
_.extend(api.classes, getClassesToInclude())
api.classes = sortClasses(api.classes)
apiJson = JSON.stringify(api, null, 2)
apiJsonPath = path.join(docsOutputDir, 'api.json')
grunt.file.write(apiJsonPath, apiJson)


@@ -1,39 +0,0 @@
async = require 'async'
fs = require 'fs-plus'
path = require 'path'
minidump = require 'minidump'
module.exports = (grunt) ->
{mkdir, rm} = require('./task-helpers')(grunt)
dumpSymbolTo = (binaryPath, targetDirectory, callback) ->
minidump.dumpSymbol binaryPath, (error, content) ->
return callback(error) if error?
moduleLine = /MODULE [^ ]+ [^ ]+ ([0-9A-F]+) (.*)\n/.exec(content)
if moduleLine.length isnt 3
return callback("Invalid output when dumping symbol for #{binaryPath}")
filename = moduleLine[2]
targetPathDirname = path.join(targetDirectory, filename, moduleLine[1])
mkdir targetPathDirname
targetPath = path.join(targetPathDirname, "#{filename}.sym")
fs.writeFile(targetPath, content, callback)
grunt.registerTask 'dump-symbols', 'Dump symbols for native modules', ->
done = @async()
symbolsDir = grunt.config.get('atom.symbolsDir')
rm symbolsDir
mkdir symbolsDir
tasks = []
onFile = (binaryPath) ->
if /.*\.node$/.test(binaryPath)
tasks.push(dumpSymbolTo.bind(this, binaryPath, symbolsDir))
onDirectory = ->
true
fs.traverseTreeSync 'node_modules', onFile, onDirectory
async.parallel tasks, done


@@ -1,7 +0,0 @@
var fingerprint = require('../../script/utils/fingerprint')
module.exports = function (grunt) {
grunt.registerTask('fingerprint', 'Fingerprint the node_modules folder for caching on CI', function () {
fingerprint.writeFingerprint()
})
}


@@ -1,37 +0,0 @@
asar = require 'asar'
fs = require 'fs'
path = require 'path'
module.exports = (grunt) ->
{cp, rm} = require('./task-helpers')(grunt)
grunt.registerTask 'generate-asar', 'Generate asar archive for the app', ->
done = @async()
unpack = [
'*.node'
'ctags-config'
'ctags-darwin'
'ctags-linux'
'ctags-win32.exe'
'**/node_modules/spellchecker/**'
'**/resources/atom.png'
]
unpack = "{#{unpack.join(',')}}"
appDir = grunt.config.get('atom.appDir')
unless fs.existsSync(appDir)
grunt.log.error 'The app has to be built before generating asar archive.'
return done(false)
asar.createPackageWithOptions appDir, path.resolve(appDir, '..', 'app.asar'), {unpack}, (err) ->
return done(err) if err?
rm appDir
fs.renameSync path.resolve(appDir, '..', 'new-app'), appDir
ctagsFolder = path.join("#{appDir}.asar.unpacked", 'node_modules', 'symbols-view', 'vendor')
for ctagsFile in fs.readdirSync(ctagsFolder)
fs.chmodSync(path.join(ctagsFolder, ctagsFile), "755")
done()


@@ -1,46 +0,0 @@
fs = require 'fs'
path = require 'path'
module.exports = (grunt) ->
grunt.registerTask 'generate-license', 'Generate the license, including the licenses of all dependencies', (mode) ->
legalEagle = require 'legal-eagle'
done = @async()
options =
path: process.cwd()
overrides: require './license-overrides'
legalEagle options, (err, dependencyLicenses) ->
if err?
console.error(err)
process.exit 1
licenseText = getLicenseText(dependencyLicenses)
if mode is 'save'
targetPath = path.resolve(grunt.config.get('atom.appDir'), '..', 'LICENSE.md')
fs.writeFileSync(targetPath, licenseText)
else
console.log licenseText
done()
getLicenseText = (dependencyLicenses) ->
{keys} = require 'underscore-plus'
text = """
#{fs.readFileSync('LICENSE.md', 'utf8')}
This application bundles the following third-party packages in accordance
with the following licenses:\n\n
"""
names = keys(dependencyLicenses).sort()
for name in names
{license, source, sourceText} = dependencyLicenses[name]
text += "-------------------------------------------------------------------------\n\n"
text += "Package: #{name}\n"
text += "License: #{license}\n"
text += "License Source: #{source}\n" if source?
if sourceText?
text += "Source Text:\n\n"
text += sourceText
text += '\n'
text


@@ -1,39 +0,0 @@
path = require 'path'
fs = require 'fs-plus'
ModuleCache = require '../../src/module-cache'
module.exports = (grunt) ->
grunt.registerTask 'generate-module-cache', 'Generate a module cache for all core modules and packages', ->
appDir = grunt.config.get('atom.appDir')
{packageDependencies} = grunt.file.readJSON('package.json')
for packageName, version of packageDependencies
ModuleCache.create(path.join(appDir, 'node_modules', packageName))
ModuleCache.create(appDir)
metadata = grunt.file.readJSON(path.join(appDir, 'package.json'))
metadata._atomModuleCache.folders.forEach (folder) ->
if '' in folder.paths
folder.paths = [
''
'exports'
'spec'
'src'
'src/main-process'
'static'
'vendor'
]
# Reactionary does not have an explicit react dependency
metadata._atomModuleCache.folders.push
paths: [
'node_modules/reactionary-atom-fork/lib'
]
dependencies: {
'react-atom-fork': metadata.dependencies['react-atom-fork']
}
grunt.file.write(path.join(appDir, 'package.json'), JSON.stringify(metadata))


@@ -1,73 +0,0 @@
path = require 'path'
fs = require 'fs-plus'
runas = null
temp = require 'temp'
module.exports = (grunt) ->
{cp, fillTemplate, mkdir, rm} = require('./task-helpers')(grunt)
grunt.registerTask 'install', 'Install the built application', ->
appName = grunt.config.get('atom.appName')
appFileName = grunt.config.get('atom.appFileName')
apmFileName = grunt.config.get('atom.apmFileName')
buildDir = grunt.config.get('atom.buildDir')
installDir = grunt.config.get('atom.installDir')
shellAppDir = grunt.config.get('atom.shellAppDir')
{description} = grunt.config.get('atom.metadata')
if process.platform is 'win32'
done = @async()
grunt.log.ok("Installing into \"#{installDir}\" from \"#{shellAppDir}\"")
parentInstallDir = path.resolve(installDir, '..')
adminRequired = false
try
rm installDir
mkdir installDir
catch err
grunt.log.ok("Admin elevation required for write access to \"#{installDir}\"")
adminRequired = true
runas ?= require 'runas'
copyFolder = path.resolve 'script', 'copy-folder.cmd'
if runas('cmd', ['/c', copyFolder, shellAppDir, installDir], admin: adminRequired) isnt 0
grunt.fail.fatal("Unable to copy files.")
else
grunt.log.ok("Completed successfully.")
done()
else if process.platform is 'darwin'
rm installDir
mkdir path.dirname(installDir)
tempFolder = temp.path()
mkdir tempFolder
cp shellAppDir, tempFolder
fs.renameSync(tempFolder, installDir)
else
shareDir = path.join(installDir, 'share', appFileName)
rm shareDir
mkdir path.dirname(shareDir)
cp shellAppDir, shareDir
unless installDir.indexOf(process.env.TMPDIR ? '/tmp') is 0
iconPath = path.join(shareDir, 'resources', 'app.asar.unpacked', 'resources', 'atom.png')
mkdir path.join(installDir, 'share', 'applications')
fillTemplate(
path.join('resources', 'linux', 'atom.desktop.in'),
path.join(installDir, 'share', 'applications', appFileName + '.desktop'),
{appName, appFileName, description, iconPath, installDir}
)
binDir = path.join(installDir, 'bin')
mkdir binDir
cp 'atom.sh', path.join(binDir, appFileName)
rm(path.join(binDir, apmFileName))
fs.symlinkSync(
path.join('..', 'share', appFileName, 'resources', 'app', 'apm', 'node_modules', '.bin', 'apm'),
path.join(binDir, apmFileName)
)
fs.chmodSync(path.join(shareDir, 'atom'), '755')


@@ -1,354 +0,0 @@
module.exports =
'aws-sign@0.3.0':
repository: 'https://github.com/mikeal/aws-sign'
license: 'MIT'
source: 'index.js'
sourceText: """
/*!
* knox - auth
* Copyright(c) 2010 LearnBoost <dev@learnboost.com>
* MIT Licensed
*/
<content omitted>
"""
'bufferjs@2.0.0':
repository: 'https://github.com/coolaj86/node-bufferjs'
license: 'MIT'
source: 'LICENSE.MIT'
sourceText: """
Copyright (c) 2010 AJ ONeal (and Contributors)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
'buffers@0.1.1':
repository: "http://github.com/substack/node-buffers"
license: 'MIT'
source: 'README.markdown'
sourceText: """
<content omitted>
license
=======
MIT/X11
"""
'cheerio@0.15.0':
repository: "https://github.com/cheeriojs/cheerio"
license: 'MIT'
source: 'https://github.com/cheeriojs/cheerio/blob/master/package.json'
'specificity@0.1.3':
repository: 'https://github.com/keeganstreet/specificity'
license: 'MIT'
source: 'package.json in repository'
'promzard@0.2.0':
license: 'ISC'
source: 'LICENSE in the repository'
sourceText: """
The ISC License
Copyright (c) Isaac Z. Schlueter
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
'jschardet@1.1.1':
license: 'LGPL'
source: 'README.md in the repository'
sourceText: """
JsChardet
=========
Port of python's chardet (http://chardet.feedparser.org/).
License
-------
LGPL
"""
'core-js@0.4.10':
license: 'MIT'
source: 'http://rock.mit-license.org linked in source files and bower.json says MIT'
'log-driver@1.2.4':
license: 'ISC'
source: 'LICENSE file in the repository'
sourceText: """
Copyright (c) 2014, Gregg Caines, gregg@caines.ca
Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
'shelljs@0.3.0':
license: 'BSD'
source: 'LICENSE file in repository - 3-clause BSD (aka BSD-new)'
sourceText: """
Copyright (c) 2012, Artur Adib <arturadib@gmail.com>
All rights reserved.
You may use this project under the terms of the New BSD license as follows:
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of Artur Adib nor the
names of the contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL ARTUR ADIB BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
'json-schema@0.2.2':
repository: 'https://github.com/kriszyp/json-schema'
license: 'BSD'
source: 'README links to https://github.com/dojo/dojo/blob/8b6a5e4c42f9cf777dd39eaae8b188e0ebb59a4c/LICENSE'
sourceText: """
Dojo is available under *either* the terms of the modified BSD license *or* the
Academic Free License version 2.1. As a recipient of Dojo, you may choose which
license to receive this code under (except as noted in per-module LICENSE
files). Some modules may not be the copyright of the Dojo Foundation. These
modules contain explicit declarations of copyright in both the LICENSE files in
the directories in which they reside and in the code itself. No external
contributions are allowed under licenses which are fundamentally incompatible
with the AFL or BSD licenses that Dojo is distributed under.
The text of the AFL and BSD licenses is reproduced below.
-------------------------------------------------------------------------------
The "New" BSD License:
**********************
Copyright (c) 2005-2015, The Dojo Foundation
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the Dojo Foundation nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-------------------------------------------------------------------------------
The Academic Free License, v. 2.1:
**********************************
This Academic Free License (the "License") applies to any original work of
authorship (the "Original Work") whose owner (the "Licensor") has placed the
following notice immediately following the copyright notice for the Original
Work:
Licensed under the Academic Free License version 2.1
1) Grant of Copyright License. Licensor hereby grants You a world-wide,
royalty-free, non-exclusive, perpetual, sublicenseable license to do the
following:
a) to reproduce the Original Work in copies;
b) to prepare derivative works ("Derivative Works") based upon the Original
Work;
c) to distribute copies of the Original Work and Derivative Works to the
public;
d) to perform the Original Work publicly; and
e) to display the Original Work publicly.
2) Grant of Patent License. Licensor hereby grants You a world-wide,
royalty-free, non-exclusive, perpetual, sublicenseable license, under patent
claims owned or controlled by the Licensor that are embodied in the Original
Work as furnished by the Licensor, to make, use, sell and offer for sale the
Original Work and Derivative Works.
3) Grant of Source Code License. The term "Source Code" means the preferred
form of the Original Work for making modifications to it and all available
documentation describing how to modify the Original Work. Licensor hereby
agrees to provide a machine-readable copy of the Source Code of the Original
Work along with each copy of the Original Work that Licensor distributes.
Licensor reserves the right to satisfy this obligation by placing a
machine-readable copy of the Source Code in an information repository
reasonably calculated to permit inexpensive and convenient access by You for as
long as Licensor continues to distribute the Original Work, and by publishing
the address of that information repository in a notice immediately following
the copyright notice that applies to the Original Work.
4) Exclusions From License Grant. Neither the names of Licensor, nor the names
of any contributors to the Original Work, nor any of their trademarks or
service marks, may be used to endorse or promote products derived from this
Original Work without express prior written permission of the Licensor. Nothing
in this License shall be deemed to grant any rights to trademarks, copyrights,
patents, trade secrets or any other intellectual property of Licensor except as
expressly stated herein. No patent license is granted to make, use, sell or
offer to sell embodiments of any patent claims other than the licensed claims
defined in Section 2. No right is granted to the trademarks of Licensor even if
such marks are included in the Original Work. Nothing in this License shall be
interpreted to prohibit Licensor from licensing under different terms from this
License any Original Work that Licensor otherwise would have a right to
license.
5) This section intentionally omitted.
6) Attribution Rights. You must retain, in the Source Code of any Derivative
Works that You create, all copyright, patent or trademark notices from the
Source Code of the Original Work, as well as any notices of licensing and any
descriptive text identified therein as an "Attribution Notice." You must cause
the Source Code for any Derivative Works that You create to carry a prominent
Attribution Notice reasonably calculated to inform recipients that You have
modified the Original Work.
7) Warranty of Provenance and Disclaimer of Warranty. Licensor warrants that
the copyright in and to the Original Work and the patent rights granted herein
by Licensor are owned by the Licensor or are sublicensed to You under the terms
of this License with the permission of the contributor(s) of those copyrights
and patent rights. Except as expressly stated in the immediately proceeding
sentence, the Original Work is provided under this License on an "AS IS" BASIS
and WITHOUT WARRANTY, either express or implied, including, without limitation,
the warranties of NON-INFRINGEMENT, MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY OF THE ORIGINAL WORK IS WITH YOU.
This DISCLAIMER OF WARRANTY constitutes an essential part of this License. No
license to Original Work is granted hereunder except under this disclaimer.
8) Limitation of Liability. Under no circumstances and under no legal theory,
whether in tort (including negligence), contract, or otherwise, shall the
Licensor be liable to any person for any direct, indirect, special, incidental,
or consequential damages of any character arising as a result of this License
or the use of the Original Work including, without limitation, damages for loss
of goodwill, work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses. This limitation of liability shall not
apply to liability for death or personal injury resulting from Licensor's
negligence to the extent applicable law prohibits such limitation. Some
jurisdictions do not allow the exclusion or limitation of incidental or
consequential damages, so this exclusion and limitation may not apply to You.
9) Acceptance and Termination. If You distribute copies of the Original Work or
a Derivative Work, You must make a reasonable effort under the circumstances to
obtain the express assent of recipients to the terms of this License. Nothing
else but this License (or another written agreement between Licensor and You)
grants You permission to create Derivative Works based upon the Original Work
or to exercise any of the rights granted in Section 1 herein, and any attempt
to do so except under the terms of this License (or another written agreement
between Licensor and You) is expressly prohibited by U.S. copyright law, the
equivalent laws of other countries, and by international treaty. Therefore, by
exercising any of the rights granted to You in Section 1 herein, You indicate
Your acceptance of this License and all of its terms and conditions.
10) Termination for Patent Action. This License shall terminate automatically
and You may no longer exercise any of the rights granted to You by this License
as of the date You commence an action, including a cross-claim or counterclaim,
against Licensor or any licensee alleging that the Original Work infringes a
patent. This termination provision shall not apply for an action alleging
patent infringement by combinations of the Original Work with other software or
hardware.
11) Jurisdiction, Venue and Governing Law. Any action or suit relating to this
License may be brought only in the courts of a jurisdiction wherein the
Licensor resides or in which Licensor conducts its primary business, and under
the laws of that jurisdiction excluding its conflict-of-law provisions. The
application of the United Nations Convention on Contracts for the International
Sale of Goods is expressly excluded. Any use of the Original Work outside the
scope of this License or after its termination shall be subject to the
requirements and penalties of the U.S. Copyright Act, 17 U.S.C. § 101 et
seq., the equivalent laws of other countries, and international treaty. This
section shall survive the termination of this License.
12) Attorneys Fees. In any action to enforce the terms of this License or
seeking damages relating thereto, the prevailing party shall be entitled to
recover its costs and expenses, including, without limitation, reasonable
attorneys' fees and costs incurred in connection with such action, including
any appeal of such action. This section shall survive the termination of this
License.
13) Miscellaneous. This License represents the complete agreement concerning
the subject matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent necessary to
make it enforceable.
14) Definition of "You" in This License. "You" throughout this License, whether
in upper or lower case, means an individual or a legal entity exercising rights
under, and complying with all of the terms of, this License. For legal
entities, "You" includes any entity that controls, is controlled by, or is
under common control with you. For purposes of this definition, "control" means
(i) the power, direct or indirect, to cause the direction or management of such
entity, whether by contract or otherwise, or (ii) ownership of fifty percent
(50%) or more of the outstanding shares, or (iii) beneficial ownership of such
entity.
15) Right to Use. You may use the Original Work in all ways not otherwise
restricted or conditioned by this License or by law, and Licensor promises not
to interfere with or be responsible for such uses by You.
This license is Copyright (C) 2003-2004 Lawrence E. Rosen. All rights reserved.
Permission is hereby granted to copy and distribute this license without
modification. This license may not be modified without the express written
permission of its copyright owner.
"""
'inherit@2.2.2':
license: 'MIT'
repository: 'https://github.com/dfilatov/inherit'
source: 'LICENSE.md'
sourceText: """
Copyright (c) 2012 Dmitry Filatov
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
'tweetnacl@0.14.3':
license: 'Public Domain'
repository: 'https://github.com/dchest/tweetnacl'
source: 'COPYING.txt'
sourceText: """
Public Domain
The person who associated a work with this deed has dedicated the work to the
public domain by waiving all of his or her rights to the work worldwide under
copyright law, including all related and neighboring rights, to the extent
allowed by law.
You can copy, modify, distribute and perform the work, even for commercial
purposes, all without asking permission.
"""

View File

@ -1,59 +0,0 @@
path = require 'path'
module.exports = (grunt) ->
{spawn, fillTemplate} = require('./task-helpers')(grunt)
grunt.registerTask 'mkdeb', 'Create debian package', ->
done = @async()
appName = grunt.config.get('atom.appName')
appFileName = grunt.config.get('atom.appFileName')
apmFileName = grunt.config.get('atom.apmFileName')
buildDir = grunt.config.get('atom.buildDir')
installDir = '/usr'
shellAppDir = grunt.config.get('atom.shellAppDir')
{version, description} = grunt.config.get('atom.metadata')
channel = grunt.config.get('atom.channel')
if process.arch is 'ia32'
arch = 'i386'
else if process.arch is 'x64'
arch = 'amd64'
else
return done("Unsupported arch #{process.arch}")
desktopFilePath = path.join(buildDir, appFileName + '.desktop')
fillTemplate(
path.join('resources', 'linux', 'atom.desktop.in'),
desktopFilePath,
{appName, appFileName, description, installDir, iconPath: appFileName}
)
getInstalledSize shellAppDir, (error, installedSize) ->
if error?
return done(error)
controlFilePath = path.join(buildDir, 'control')
fillTemplate(
path.join('resources', 'linux', 'debian', 'control.in'),
controlFilePath,
{appFileName, version, arch, installedSize, description}
)
iconPath = path.join(shellAppDir, 'resources', 'app.asar.unpacked', 'resources', 'atom.png')
cmd = path.join('script', 'mkdeb')
args = [appFileName, version, channel, arch, controlFilePath, desktopFilePath, iconPath, buildDir]
spawn {cmd, args}, (error) ->
if error?
done(error)
else
grunt.log.ok "Created #{buildDir}/#{appFileName}-#{version}-#{arch}.deb"
done()
getInstalledSize = (directory, callback) ->
cmd = 'du'
args = ['-sk', directory]
spawn {cmd, args}, (error, {stdout}) ->
installedSize = stdout.split(/\s+/)?[0] or '200000' # default to 200MB
callback(null, installedSize)

View File

@ -1,54 +0,0 @@
path = require 'path'
module.exports = (grunt) ->
{spawn, fillTemplate, rm, mkdir} = require('./task-helpers')(grunt)
grunt.registerTask 'mkrpm', 'Create rpm package', ->
done = @async()
appName = grunt.config.get('atom.appName')
appFileName = grunt.config.get('atom.appFileName')
apmFileName = grunt.config.get('atom.apmFileName')
buildDir = grunt.config.get('atom.buildDir')
installDir = '/usr'
{version, description} = grunt.config.get('atom.metadata')
if process.arch is 'ia32'
arch = 'i386'
else if process.arch is 'x64'
arch = 'amd64'
else
return done("Unsupported arch #{process.arch}")
desktopFilePath = path.join(buildDir, appFileName + '.desktop')
fillTemplate(
path.join('resources', 'linux', 'atom.desktop.in'),
desktopFilePath,
{appName, appFileName, description, installDir, iconPath: appFileName}
)
# RPM versions can't have dashes in them.
# * http://www.rpm.org/max-rpm/ch-rpm-file-format.html
# * https://github.com/mojombo/semver/issues/145
version = version.replace(/-beta/, "~beta")
version = version.replace(/-dev/, "~dev")
specFilePath = path.join(buildDir, appFileName + '.spec')
fillTemplate(
path.join('resources', 'linux', 'redhat', 'atom.spec.in'),
specFilePath,
{appName, appFileName, apmFileName, installDir, version, description}
)
rpmDir = path.join(buildDir, 'rpm')
rm rpmDir
mkdir rpmDir
cmd = path.join('script', 'mkrpm')
args = [appName, appFileName, specFilePath, desktopFilePath, buildDir]
spawn {cmd, args}, (error) ->
if error?
done(error)
else
grunt.log.ok "Created rpm package in #{rpmDir}"
done()

View File

@ -1,30 +0,0 @@
path = require 'path'
module.exports = (grunt) ->
{spawn, fillTemplate} = require('./task-helpers')(grunt)
grunt.registerTask 'mktar', 'Create an archive', ->
done = @async()
appFileName = grunt.config.get('atom.appFileName')
buildDir = grunt.config.get('atom.buildDir')
shellAppDir = grunt.config.get('atom.shellAppDir')
{version, description} = grunt.config.get('atom.metadata')
if process.arch is 'ia32'
arch = 'i386'
else if process.arch is 'x64'
arch = 'amd64'
else
return done("Unsupported arch #{process.arch}")
iconPath = path.join(shellAppDir, 'resources', 'app.asar.unpacked', 'resources', 'atom.png')
cmd = path.join('script', 'mktar')
args = [appFileName, version, arch, iconPath, buildDir]
spawn {cmd, args}, (error) ->
if error?
done(error)
else
grunt.log.ok "Created " + path.join(buildDir, "#{appFileName}-#{version}-#{arch}.tar.gz")
done()

View File

@ -1,6 +0,0 @@
module.exports = (grunt) ->
{spawn} = require('./task-helpers')(grunt)
grunt.registerTask 'nof', 'Un-focus all specs', ->
nof = require.resolve('.bin/nof')
spawn({cmd: nof, args: ['spec']}, @async())

View File

@ -1,34 +0,0 @@
asar = require 'asar'
path = require 'path'
module.exports = (grunt) ->
grunt.registerTask 'output-build-filetypes', 'Log counts for each filetype in the built application', ->
shellAppDir = grunt.config.get('atom.shellAppDir')
types = {}
registerFile = (filePath) ->
extension = path.extname(filePath) or path.basename(filePath)
types[extension] ?= []
types[extension].push(filePath)
if extension is '.asar'
asar.listPackage(filePath).forEach (archivePath) ->
archivePath = archivePath.substring(1)
unless asar.statFile(filePath, archivePath, true).files
registerFile(archivePath)
grunt.file.recurse shellAppDir, (absolutePath, rootPath, relativePath, fileName) -> registerFile(absolutePath)
extensions = Object.keys(types).sort (extension1, extension2) ->
diff = types[extension2].length - types[extension1].length
if diff is 0
extension1.toLowerCase().localeCompare(extension2.toLowerCase())
else
diff
if extension = grunt.option('extension')
types[extension]?.sort().forEach (filePath) ->
grunt.log.error filePath
else
extensions[0...25].forEach (extension) ->
grunt.log.error "#{extension}: #{types[extension].length}"

View File

@ -1,25 +0,0 @@
module.exports = (grunt) ->
{spawn} = require('./task-helpers')(grunt)
grunt.registerTask 'output-disk-space', 'Print diskspace available', ->
return unless process.platform is 'darwin'
done = @async()
cmd = 'df'
args = ['-Hl']
spawn {cmd, args}, (error, result, code) ->
return done(error) if error?
lines = result.stdout.split("\n")
for line in lines[1..]
[filesystem, size, used, avail, capacity, extra] = line.split(/\s+/)
capacity = parseInt(capacity)
if capacity > 90
grunt.log.error("#{filesystem} is at #{capacity}% capacity!")
else if capacity > 80
grunt.log.ok("#{filesystem} is at #{capacity}% capacity.")
done()

View File

@ -1,22 +0,0 @@
path = require 'path'
module.exports = (grunt) ->
grunt.registerTask 'output-for-loop-returns', 'Log methods that end with a for loop', ->
appDir = grunt.config.get('atom.appDir')
jsPaths = []
grunt.file.recurse path.join(appDir, 'src'), (absolutePath, rootPath, relativePath, fileName) ->
jsPaths.push(absolutePath) if path.extname(fileName) is '.js'
jsPaths.forEach (jsPath) ->
js = grunt.file.read(jsPath)
method = null
for line, index in js.split('\n')
[match, className, methodName] = /^\s*([a-zA-Z]+)\.(?:prototype\.)?([a-zA-Z]+)\s*=\s*function\(/.exec(line) ? []
if className and methodName
method = "#{className}::#{methodName}"
else
[match, ctorName] = /^\s*function\s+([a-zA-Z]+)\(/.exec(line) ? []
if /^\s*return\s+_results;\s*$/.test(line)
console.log(method ? "#{path.basename(jsPath)}:#{index}")

View File

@ -1,18 +0,0 @@
path = require 'path'
module.exports = (grunt) ->
grunt.registerTask 'output-long-paths', 'Log long paths in the built application', ->
shellAppDir = grunt.config.get('atom.shellAppDir')
longPaths = []
grunt.file.recurse shellAppDir, (absolutePath, rootPath, relativePath, fileName) ->
if relativePath
fullPath = path.join(relativePath, fileName)
else
fullPath = fileName
longPaths.push(fullPath) if fullPath.length >= 175
longPaths.sort (longPath1, longPath2) -> longPath2.length - longPath1.length
longPaths.forEach (longPath) ->
grunt.log.error "#{longPath.length} character path: #{longPath}"

View File

@ -1,66 +0,0 @@
fs = require 'fs'
path = require 'path'
module.exports = (grunt) ->
grunt.registerTask 'output-module-counts', 'Log modules where more than one copy exists in node_modules', ->
nodeModulesDir = path.resolve(__dirname, '..', '..', 'node_modules')
otherModules = {}
atomModules = {}
sortModuleNames = (modules) ->
Object.keys(modules).sort (name1, name2) ->
diff = modules[name2].count - modules[name1].count
diff = name1.localeCompare(name2) if diff is 0
diff
getAtomTotal = ->
Object.keys(atomModules).length
getOtherTotal = ->
Object.keys(otherModules).length
recurseHandler = (absolutePath, rootPath, relativePath, fileName) ->
return if fileName isnt 'package.json'
{name, version, repository} = grunt.file.readJSON(absolutePath)
return unless name and version
repository = repository.url if repository?.url
if /.+\/atom\/.+/.test(repository)
modules = atomModules
else
modules = otherModules
modules[name] ?= {versions: {}, count: 0}
modules[name].count++
modules[name].versions[version] = true
walkNodeModuleDir = ->
grunt.file.recurse(nodeModulesDir, recurseHandler)
# Handle broken symlinks that grunt.file.recurse fails to handle
loop
try
walkNodeModuleDir()
break
catch error
if error.code is 'ENOENT'
fs.unlinkSync(error.path)
otherModules = {}
atomModules = {}
else
break
if getAtomTotal() > 0
console.log "Atom Modules: #{getAtomTotal()}"
sortModuleNames(atomModules).forEach (name) ->
{count, versions, atom} = atomModules[name]
grunt.log.error "#{name}: #{count} (#{Object.keys(versions).join(', ')})" if count > 1
console.log()
console.log "Other Modules: #{getOtherTotal()}"
sortModuleNames(otherModules).forEach (name) ->
{count, versions, atom} = otherModules[name]
grunt.log.error "#{name}: #{count} (#{Object.keys(versions).join(', ')})" if count > 1

View File

@ -1,87 +0,0 @@
path = require 'path'
fs = require 'fs'
temp = require('temp').track()
LessCache = require 'less-cache'
module.exports = (grunt) ->
{rm} = require('./task-helpers')(grunt)
cacheMisses = 0
cacheHits = 0
importFallbackVariables = (lessFilePath) ->
if lessFilePath.indexOf('static') is 0
false
else
true
grunt.registerMultiTask 'prebuild-less', 'Prebuild cache of compiled Less files', ->
uiThemes = [
'atom-dark-ui'
'atom-light-ui'
'one-dark-ui'
'one-light-ui'
]
syntaxThemes = [
'atom-dark-syntax'
'atom-light-syntax'
'one-dark-syntax'
'one-light-syntax'
'solarized-dark-syntax'
'base16-tomorrow-dark-theme'
'base16-tomorrow-light-theme'
]
prebuiltConfigurations = []
uiThemes.forEach (uiTheme) ->
syntaxThemes.forEach (syntaxTheme) ->
prebuiltConfigurations.push([uiTheme, syntaxTheme])
directory = path.join(grunt.config.get('atom.appDir'), 'less-compile-cache')
for configuration in prebuiltConfigurations
importPaths = grunt.config.get('less.options.paths')
themeMains = []
for theme in configuration
# TODO Use AtomPackage class once it runs outside of an Atom context
themePath = path.resolve('node_modules', theme)
if fs.existsSync(path.join(themePath, 'stylesheets'))
stylesheetsDir = path.join(themePath, 'stylesheets')
else
stylesheetsDir = path.join(themePath, 'styles')
{main} = grunt.file.readJSON(path.join(themePath, 'package.json'))
main ?= 'index.less'
mainPath = path.join(themePath, main)
themeMains.push(mainPath) if grunt.file.isFile(mainPath)
importPaths.unshift(stylesheetsDir) if grunt.file.isDir(stylesheetsDir)
grunt.verbose.writeln("Building Less cache for #{configuration.join(', ').yellow}")
lessCache = new LessCache
cacheDir: directory
fallbackDir: grunt.config.get('prebuild-less.options.cachePath')
syncCaches: true
resourcePath: path.resolve('.')
importPaths: importPaths
cssForFile = (file) ->
less = fs.readFileSync(file, 'utf8')
if importFallbackVariables(file)
baseVarImports = """
@import "variables/ui-variables";
@import "variables/syntax-variables";
"""
less = [baseVarImports, less].join('\n')
lessCache.cssForFile(file, less)
for file in @filesSrc
grunt.verbose.writeln("File #{file.cyan} created in cache.")
cssForFile(file)
for file in themeMains
grunt.verbose.writeln("File #{file.cyan} created in cache.")
cssForFile(file)
cacheMisses += lessCache.stats.misses
cacheHits += lessCache.stats.hits
grunt.log.ok("#{cacheMisses} files compiled, #{cacheHits} files reused")

View File

@ -1,281 +0,0 @@
child_process = require 'child_process'
path = require 'path'
_ = require 'underscore-plus'
async = require 'async'
fs = require 'fs-plus'
GitHub = require 'github-releases'
request = require 'request'
AWS = require 'aws-sdk'
grunt = null
token = process.env.ATOM_ACCESS_TOKEN
repo = process.env.ATOM_PUBLISH_REPO ? 'atom/atom'
defaultHeaders =
Authorization: "token #{token}"
'User-Agent': 'Atom'
module.exports = (gruntObject) ->
grunt = gruntObject
{cp} = require('./task-helpers')(grunt)
grunt.registerTask 'publish-build', 'Publish the built app', ->
tasks = []
tasks.push('build-docs', 'prepare-docs') if process.platform is 'darwin'
tasks.push('upload-assets')
grunt.task.run(tasks)
grunt.registerTask 'prepare-docs', 'Move api.json to atom-api.json', ->
docsOutputDir = grunt.config.get('docsOutputDir')
buildDir = grunt.config.get('atom.buildDir')
cp path.join(docsOutputDir, 'api.json'), path.join(buildDir, 'atom-api.json')
grunt.registerTask 'upload-assets', 'Upload the assets to a GitHub release', ->
releaseBranch = grunt.config.get('atom.releaseBranch')
isPrerelease = grunt.config.get('atom.channel') is 'beta'
unless releaseBranch?
grunt.log.ok("Skipping upload-assets to #{repo} repo because this is not a release branch")
return
grunt.log.ok("Starting upload-assets to #{repo} repo")
doneCallback = @async()
startTime = Date.now()
done = (args...) ->
elapsedTime = Math.round((Date.now() - startTime) / 100) / 10
grunt.log.ok("Upload time: #{elapsedTime}s")
doneCallback(args...)
unless token
return done(new Error('ATOM_ACCESS_TOKEN environment variable not set'))
buildDir = grunt.config.get('atom.buildDir')
assets = getAssets()
zipAssets buildDir, assets, (error) ->
return done(error) if error?
getAtomDraftRelease isPrerelease, releaseBranch, (error, release) ->
return done(error) if error?
assetNames = (asset.assetName for asset in assets)
deleteExistingAssets release, assetNames, (error) ->
return done(error) if error?
uploadAssets(release, buildDir, assets, done)
getAssets = ->
{cp} = require('./task-helpers')(grunt)
{version} = grunt.file.readJSON('package.json')
buildDir = grunt.config.get('atom.buildDir')
appName = grunt.config.get('atom.appName')
appFileName = grunt.config.get('atom.appFileName')
switch process.platform
when 'darwin'
[
{assetName: 'atom-mac.zip', sourcePath: appName}
{assetName: 'atom-mac-symbols.zip', sourcePath: 'Atom.breakpad.syms'}
{assetName: 'atom-api.json', sourcePath: 'atom-api.json'}
]
when 'win32'
assets = [{assetName: 'atom-windows.zip', sourcePath: appName}]
for squirrelAsset in ['AtomSetup.exe', 'AtomSetup.msi', 'RELEASES', "atom-#{version}-full.nupkg", "atom-#{version}-delta.nupkg"]
cp path.join(buildDir, 'installer', squirrelAsset), path.join(buildDir, squirrelAsset)
assets.push({assetName: squirrelAsset, sourcePath: squirrelAsset})
assets
when 'linux'
if process.arch is 'ia32'
arch = 'i386'
else
arch = 'amd64'
# Check for a Debian build
sourcePath = path.join(buildDir, "#{appFileName}-#{version}-#{arch}.deb")
assetName = "atom-#{arch}.deb"
# Check for a Fedora build
unless fs.isFileSync(sourcePath)
rpmName = fs.readdirSync("#{buildDir}/rpm")[0]
sourcePath = path.join(buildDir, "rpm", rpmName)
if process.arch is 'ia32'
arch = 'i386'
else
arch = 'x86_64'
assetName = "atom.#{arch}.rpm"
cp sourcePath, path.join(buildDir, assetName)
assets = [{assetName, sourcePath}]
# Check for an archive build on a debian build machine.
# We could provide a Fedora version if some libraries are not compatible
sourcePath = path.join(buildDir, "#{appFileName}-#{version}-#{arch}.tar.gz")
if fs.isFileSync(sourcePath)
assetName = "atom-#{arch}.tar.gz"
cp sourcePath, path.join(buildDir, assetName)
assets.push({assetName, sourcePath})
assets
logError = (message, error, details) ->
grunt.log.error(message)
grunt.log.error(error.message ? error) if error?
grunt.log.error(require('util').inspect(details)) if details
zipAssets = (buildDir, assets, callback) ->
zip = (directory, sourcePath, assetName, callback) ->
grunt.log.ok("Zipping #{sourcePath} into #{assetName}")
if process.platform is 'win32'
sevenZipPath = if process.env.JANKY_SHA1? then "C:/psmodules/" else ""
zipCommand = "#{sevenZipPath}7z.exe a -r \"#{assetName}\" \"#{sourcePath}\""
else
zipCommand = "zip -r --symlinks '#{assetName}' '#{sourcePath}'"
options = {cwd: directory, maxBuffer: Infinity}
child_process.exec zipCommand, options, (error, stdout, stderr) ->
logError("Zipping #{sourcePath} failed", error, stderr) if error?
callback(error)
tasks = []
for {assetName, sourcePath} in assets when path.extname(assetName) is '.zip'
fs.removeSync(path.join(buildDir, assetName))
tasks.push(zip.bind(this, buildDir, sourcePath, assetName))
async.parallel(tasks, callback)
getAtomDraftRelease = (isPrerelease, branchName, callback) ->
grunt.log.ok("Obtaining GitHub draft release for #{branchName}")
atomRepo = new GitHub({repo: repo, token})
atomRepo.getReleases {prerelease: isPrerelease}, (error, releases=[]) ->
if error?
logError("Fetching #{repo} #{if isPrerelease then "pre" else "" }releases failed", error, releases)
callback(error)
else
[firstDraft] = releases.filter ({draft}) -> draft
if firstDraft?
options =
uri: firstDraft.assets_url
method: 'GET'
headers: defaultHeaders
json: true
request options, (error, response, assets=[]) ->
if error? or response.statusCode isnt 200
logError('Fetching draft release assets failed', error, assets)
callback(error ? new Error(response.statusCode))
else
grunt.log.ok("Using GitHub draft release #{firstDraft.name}")
firstDraft.assets = assets
callback(null, firstDraft)
else
createAtomDraftRelease(isPrerelease, branchName, callback)
createAtomDraftRelease = (isPrerelease, branchName, callback) ->
grunt.log.ok("Creating GitHub draft release #{branchName}")
{version} = require('../../package.json')
options =
uri: "https://api.github.com/repos/#{repo}/releases"
method: 'POST'
headers: defaultHeaders
json:
tag_name: "v#{version}"
prerelease: isPrerelease
target_commitish: branchName
name: version
draft: true
body: """
### Notable Changes
* Something new
"""
request options, (error, response, body='') ->
if error? or response.statusCode isnt 201
logError("Creating #{repo} draft release failed", error, body)
callback(error ? new Error(response.statusCode))
else
callback(null, body)
deleteRelease = (release) ->
grunt.log.ok("Deleting GitHub release #{release.tag_name}")
options =
uri: release.url
method: 'DELETE'
headers: defaultHeaders
json: true
request options, (error, response, body='') ->
if error? or response.statusCode isnt 204
logError('Deleting release failed', error, body)
deleteExistingAssets = (release, assetNames, callback) ->
grunt.log.ok("Deleting #{assetNames.join(',')} from GitHub release #{release.tag_name}")
[callback, assetNames] = [assetNames, callback] if not callback?
deleteAsset = (url, callback) ->
options =
uri: url
method: 'DELETE'
headers: defaultHeaders
request options, (error, response, body='') ->
if error? or response.statusCode isnt 204
logError('Deleting existing release asset failed', error, body)
callback(error ? new Error(response.statusCode))
else
callback()
tasks = []
for asset in release.assets when not assetNames? or asset.name in assetNames
tasks.push(deleteAsset.bind(this, asset.url))
async.parallel(tasks, callback)
uploadAssets = (release, buildDir, assets, callback) ->
uploadToReleases = (release, assetName, assetPath, callback) ->
grunt.log.ok("Uploading #{assetName} to GitHub release #{release.tag_name}")
options =
uri: release.upload_url.replace(/\{.*$/, "?name=#{assetName}")
method: 'POST'
headers: _.extend({
'Content-Type': 'application/zip'
'Content-Length': fs.getSizeSync(assetPath)
}, defaultHeaders)
assetRequest = request options, (error, response, body='') ->
if error? or response.statusCode >= 400
logError("Upload release asset #{assetName} to Releases failed", error, body)
callback(error ? new Error(response.statusCode))
else
callback(null, release)
fs.createReadStream(assetPath).pipe(assetRequest)
uploadToS3 = (release, assetName, assetPath, callback) ->
s3Key = process.env.BUILD_ATOM_RELEASES_S3_KEY
s3Secret = process.env.BUILD_ATOM_RELEASES_S3_SECRET
s3Bucket = process.env.BUILD_ATOM_RELEASES_S3_BUCKET
unless s3Key and s3Secret and s3Bucket
callback(new Error('BUILD_ATOM_RELEASES_S3_KEY, BUILD_ATOM_RELEASES_S3_SECRET, and BUILD_ATOM_RELEASES_S3_BUCKET environment variables must be set.'))
return
s3Info =
accessKeyId: s3Key
secretAccessKey: s3Secret
s3 = new AWS.S3 s3Info
key = "releases/#{release.tag_name}/#{assetName}"
grunt.log.ok("Uploading to S3 #{key}")
uploadParams =
Bucket: s3Bucket
ACL: 'public-read'
Key: key
Body: fs.createReadStream(assetPath)
s3.upload uploadParams, (error, data) ->
if error?
logError("Upload release asset #{assetName} to S3 failed", error)
callback(error)
else
callback(null, release)
tasks = []
for {assetName} in assets
assetPath = path.join(buildDir, assetName)
tasks.push(uploadToReleases.bind(this, release, assetName, assetPath))
tasks.push(uploadToS3.bind(this, release, assetName, assetPath))
async.parallel(tasks, callback)

View File

@ -1,13 +0,0 @@
path = require 'path'
module.exports = (grunt) ->
grunt.registerTask 'set-exe-icon', 'Set icon of the exe', ->
done = @async()
channel = grunt.config.get('atom.channel')
shellAppDir = grunt.config.get('atom.shellAppDir')
shellExePath = path.join(shellAppDir, 'atom.exe')
iconPath = path.resolve('resources', 'app-icons', channel, 'atom.ico')
rcedit = require('rcedit')
rcedit(shellExePath, {'icon': iconPath}, done)

View File

@ -1,57 +0,0 @@
fs = require 'fs'
path = require 'path'
module.exports = (grunt) ->
{spawn} = require('./task-helpers')(grunt)
getVersion = (callback) ->
shouldUseCommitHash = grunt.config.get('atom.channel') is 'dev'
inRepository = fs.existsSync(path.resolve(__dirname, '..', '..', '.git'))
{version} = require(path.join(grunt.config.get('atom.appDir'), 'package.json'))
if shouldUseCommitHash and inRepository
cmd = 'git'
args = ['rev-parse', '--short', 'HEAD']
spawn {cmd, args}, (error, {stdout}={}, code) ->
commitHash = stdout?.trim?()
combinedVersion = "#{version}-#{commitHash}"
callback(error, combinedVersion)
else
callback(null, version)
grunt.registerTask 'set-version', 'Set the version in the plist and package.json', ->
done = @async()
getVersion (error, version) ->
if error?
done(error)
return
appDir = grunt.config.get('atom.appDir')
shellAppDir = grunt.config.get('atom.shellAppDir')
# Replace version field of package.json.
packageJsonPath = path.join(appDir, 'package.json')
packageJson = require(packageJsonPath)
packageJson.version = version
packageJsonString = JSON.stringify(packageJson)
fs.writeFileSync(packageJsonPath, packageJsonString)
if process.platform is 'darwin'
cmd = 'script/set-version'
args = [shellAppDir, version]
spawn {cmd, args}, (error, result, code) -> done(error)
else if process.platform is 'win32'
shellAppDir = grunt.config.get('atom.shellAppDir')
shellExePath = path.join(shellAppDir, 'atom.exe')
strings =
CompanyName: 'GitHub, Inc.'
FileDescription: 'Atom'
LegalCopyright: 'Copyright (C) 2015 GitHub, Inc. All rights reserved'
ProductName: 'Atom'
ProductVersion: version
rcedit = require('rcedit')
rcedit(shellExePath, {'version-string': strings}, done)
else
done()

View File

@ -1,158 +0,0 @@
fs = require 'fs'
path = require 'path'
temp = require('temp').track()
_ = require 'underscore-plus'
async = require 'async'
# Run specs serially on CircleCI
if process.env.CIRCLECI
concurrency = 1
else
concurrency = 2
module.exports = (grunt) ->
{isAtomPackage, spawn} = require('./task-helpers')(grunt)
packageSpecQueue = null
getAppPath = ->
contentsDir = grunt.config.get('atom.contentsDir')
switch process.platform
when 'darwin'
path.join(contentsDir, 'MacOS', 'Atom')
when 'linux'
path.join(contentsDir, 'atom')
when 'win32'
path.join(contentsDir, 'atom.exe')
runPackageSpecs = (callback) ->
failedPackages = []
rootDir = grunt.config.get('atom.shellAppDir')
resourcePath = process.cwd()
appPath = getAppPath()
# Ensure application is executable on Linux
fs.chmodSync(appPath, '755') if process.platform is 'linux'
packageSpecQueue = async.queue (packagePath, callback) ->
if process.platform in ['darwin', 'linux']
options =
cmd: appPath
args: ['--test', "--resource-path=#{resourcePath}", path.join(packagePath, 'spec')]
opts:
cwd: packagePath
env: _.extend({}, process.env, ELECTRON_ENABLE_LOGGING: true, ATOM_PATH: rootDir)
else if process.platform is 'win32'
options =
cmd: process.env.comspec
args: ['/c', appPath, '--test', "--resource-path=#{resourcePath}", "--log-file=ci.log", path.join(packagePath, 'spec')]
opts:
cwd: packagePath
env: _.extend({}, process.env, ELECTRON_ENABLE_LOGGING: true, ATOM_PATH: rootDir)
grunt.log.ok "Launching #{path.basename(packagePath)} specs."
spawn options, (error, results, code) ->
if process.platform is 'win32'
if error
process.stderr.write(fs.readFileSync(path.join(packagePath, 'ci.log')))
fs.unlinkSync(path.join(packagePath, 'ci.log'))
failedPackages.push path.basename(packagePath) if error
callback()
modulesDirectory = path.resolve('node_modules')
for packageDirectory in fs.readdirSync(modulesDirectory)
packagePath = path.join(modulesDirectory, packageDirectory)
continue unless grunt.file.isDir(path.join(packagePath, 'spec'))
continue unless isAtomPackage(packagePath)
packageSpecQueue.push(packagePath)
packageSpecQueue.concurrency = Math.max(1, concurrency - 1)
packageSpecQueue.drain = -> callback(null, failedPackages)
runRendererProcessSpecs = (callback) ->
appPath = getAppPath()
resourcePath = process.cwd()
coreSpecsPath = path.resolve('spec')
if process.platform in ['darwin', 'linux']
options =
cmd: appPath
args: ['--test', "--resource-path=#{resourcePath}", coreSpecsPath, "--user-data-dir=#{temp.mkdirSync('atom-user-data-dir')}"]
opts:
env: _.extend({}, process.env, {ELECTRON_ENABLE_LOGGING: true, ATOM_INTEGRATION_TESTS_ENABLED: true})
stdio: 'inherit'
else if process.platform is 'win32'
options =
cmd: process.env.comspec
args: ['/c', appPath, '--test', "--resource-path=#{resourcePath}", '--log-file=ci.log', coreSpecsPath]
opts:
env: _.extend({}, process.env, {ELECTRON_ENABLE_LOGGING: true, ATOM_INTEGRATION_TESTS_ENABLED: true})
stdio: 'inherit'
grunt.log.ok "Launching core specs (renderer process)."
spawn options, (error, results, code) ->
if process.platform is 'win32'
process.stderr.write(fs.readFileSync('ci.log')) if error
fs.unlinkSync('ci.log')
else
# TODO: Restore concurrency on Windows
packageSpecQueue?.concurrency = concurrency
callback(null, error)
runMainProcessSpecs = (callback) ->
appPath = getAppPath()
resourcePath = process.cwd()
mainProcessSpecsPath = path.resolve('spec/main-process')
if process.platform in ['darwin', 'linux']
options =
cmd: appPath
args: ["--test", "--main-process", "--resource-path=#{resourcePath}", mainProcessSpecsPath]
opts:
env: process.env
stdio: 'inherit'
else if process.platform is 'win32'
options =
cmd: process.env.comspec
args: ['/c', appPath, "--test", "--main-process", "--resource-path=#{resourcePath}", mainProcessSpecsPath]
opts:
env: process.env
stdio: 'inherit'
grunt.log.ok "Launching core specs (main process)."
spawn options, (error, results, code) ->
callback(null, error)
grunt.registerTask 'run-specs', 'Run the specs', ->
done = @async()
startTime = Date.now()
method =
if concurrency is 1
async.series
else
async.parallel
specs = [runRendererProcessSpecs, runMainProcessSpecs, runPackageSpecs]
method specs, (error, results) ->
failedPackages = []
coreSpecFailed = null
[rendererProcessSpecsFailed, mainProcessSpecsFailed, failedPackages] = results
elapsedTime = Math.round((Date.now() - startTime) / 100) / 10
grunt.log.ok("Total spec time: #{elapsedTime}s using #{concurrency} cores")
failures = failedPackages
failures.push "atom core (renderer process)" if rendererProcessSpecsFailed
failures.push "atom core (main process)" if mainProcessSpecsFailed
grunt.log.error("[Error]".red + " #{failures.join(', ')} spec(s) failed") if failures.length > 0
if process.platform is 'win32' and process.env.JANKY_SHA1
done()
else
done(failures.length is 0)

View File

@ -1,75 +0,0 @@
fs = require 'fs-plus'
path = require 'path'
_ = require 'underscore-plus'
module.exports = (grunt) ->
cp: (source, destination, {filter}={}) ->
unless grunt.file.exists(source)
grunt.fatal("Cannot copy non-existent #{source.cyan} to #{destination.cyan}")
copyFile = (sourcePath, destinationPath) ->
return if filter?(sourcePath) or filter?.test?(sourcePath)
stats = fs.lstatSync(sourcePath)
if stats.isSymbolicLink()
grunt.file.mkdir(path.dirname(destinationPath))
fs.symlinkSync(fs.readlinkSync(sourcePath), destinationPath)
else if stats.isFile()
grunt.file.copy(sourcePath, destinationPath)
if grunt.file.exists(destinationPath)
fs.chmodSync(destinationPath, fs.statSync(sourcePath).mode)
if grunt.file.isFile(source)
copyFile(source, destination)
else
try
onFile = (sourcePath) ->
destinationPath = path.join(destination, path.relative(source, sourcePath))
copyFile(sourcePath, destinationPath)
onDirectory = (sourcePath) ->
if fs.isSymbolicLinkSync(sourcePath)
destinationPath = path.join(destination, path.relative(source, sourcePath))
copyFile(sourcePath, destinationPath)
false
else
true
fs.traverseTreeSync source, onFile, onDirectory
catch error
grunt.fatal(error)
grunt.verbose.writeln("Copied #{source.cyan} to #{destination.cyan}.")
mkdir: (args...) ->
grunt.file.mkdir(args...)
rm: (args...) ->
grunt.file.delete(args..., force: true) if grunt.file.exists(args...)
spawn: (options, callback) ->
childProcess = require 'child_process'
stdout = []
stderr = []
error = null
proc = childProcess.spawn(options.cmd, options.args, options.opts)
if proc.stdout?
proc.stdout.on 'data', (data) -> stdout.push(data.toString())
if proc.stderr?
proc.stderr.on 'data', (data) -> stderr.push(data.toString())
proc.on 'error', (processError) -> error ?= processError
proc.on 'close', (exitCode, signal) ->
error ?= new Error(signal) if exitCode isnt 0
results = {stderr: stderr.join(''), stdout: stdout.join(''), code: exitCode}
grunt.log.error results.stderr if exitCode isnt 0
callback(error, results, exitCode)
isAtomPackage: (packagePath) ->
try
{engines} = grunt.file.readJSON(path.join(packagePath, 'package.json'))
engines?.atom?
catch error
false
fillTemplate: (templatePath, outputPath, data) ->
content = _.template(String(fs.readFileSync(templatePath)))(data)
grunt.file.write(outputPath, content)

View File

@ -12,7 +12,9 @@ machine:
general:
artifacts:
- out/Atom.zip
- out/atom-mac.zip
- out/atom-mac-symbols.zip
- docs/output/atom-api.json
dependencies:
pre:
@ -20,26 +22,21 @@ dependencies:
- nvm install 4.4.7
- nvm use 4.4.7
- npm install -g npm
- script/fingerprint-clean
override:
- script/bootstrap
post:
- script/fingerprint-write
- script/build --code-sign --compress-artifacts
cache_directories:
- electron
- apm/node_modules
- build/node_modules
- script/node_modules
- node_modules
- ~/.atom/compile-cache
test:
override:
- caffeinate -s script/grunt ci # Run with caffeinate to prevent screen saver
post:
- zip -r out/Atom.zip out/Atom.app
- script/lint
- caffeinate -s script/test # Run with caffeinate to prevent screen saver
experimental:
notify:

View File

@ -4,33 +4,16 @@ FreeBSD -RELEASE 64-bit is the recommended platform.
## Requirements
* FreeBSD
* `pkg install node`
* `pkg install npm`
* `pkg install libgnome-keyring`
* `npm config set python /usr/local/bin/python2 -g` to ensure that gyp uses Python 2
* FreeBSD
* `pkg install node`
* `pkg install npm`
* `pkg install libgnome-keyring`
* `npm config set python /usr/local/bin/python2 -g` to ensure that gyp uses Python 2
## Instructions
```sh
git clone https://github.com/atom/atom
cd atom
script/build # Creates application at $TMPDIR/atom-build/Atom
sudo script/grunt install # Installs command to /usr/local/bin/atom
```
## Advanced Options
### Custom install directory
```sh
sudo script/grunt install --install-dir /install/atom/here
git clone https://github.com/atom/atom
cd atom
script/build
```
### Custom build directory
```sh
script/build --build-dir /build/atom/here
```
## Troubleshooting

View File

@ -4,37 +4,54 @@ Ubuntu LTS 12.04 64-bit is the recommended platform.
## Requirements
* OS with 64-bit or 32-bit architecture
* C++11 toolchain
* [Git](https://git-scm.com/)
* Node.js (4.x or above) (Can be installed via [nvm](https://github.com/creationix/nvm)).
* [npm](https://www.npmjs.com/) v3.10.5 or above (automatically bundled with Node.js)
* `npm -v` to check the version.
* `npm install -g npm` to upgrade if necessary.
* `npm config set python /usr/bin/python2 -g` to ensure that gyp uses python2.
* You might need to run this command as `sudo`, depending on how you have set up [npm](https://github.com/joyent/node/wiki/Installing-Node.js-via-package-manager#ubuntu-mint-elementary-os).
* development headers for [GNOME Keyring](https://wiki.gnome.org/Projects/GnomeKeyring)
* OS with 64-bit or 32-bit architecture
* C++11 toolchain
* Git
* Node.js 4.4.x or later (we recommend installing it via [nvm](https://github.com/creationix/nvm))
* npm 3.10.x or later (run `npm install -g npm`)
* Ensure node-gyp uses python2 (run `npm config set python /usr/bin/python2 -g`, use `sudo` if you didn't install node via nvm)
* Development headers for [GNOME Keyring](https://wiki.gnome.org/Projects/GnomeKeyring).
For more details, scroll down to find out how to set up a specific Linux distro.
## Instructions
```sh
git clone https://github.com/atom/atom.git
cd atom
script/build
```
To also install the newly built application, use `--create-debian-package` or `--create-rpm-package` and then install the generated package via the system package manager.
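As a concrete sketch, on a 64-bit Debian or Ubuntu machine the full build-and-install pass could look like this (the package lands in `out/` with an architecture-dependent name, per the options described in the next section):

```sh
# Build Atom and generate a Debian package under out/
script/build --create-debian-package

# Install the generated package via the system package manager
sudo dpkg -i out/atom-amd64.deb
```

On an RPM-based distribution, pass `--create-rpm-package` instead and install the generated `.rpm` from `out/` with `dnf install` (or `yum install` on older releases).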
### `script/build` Options
* `--compress-artifacts`: compresses the generated application into `out/atom-{arch}.tar.gz`.
* `--create-debian-package`: creates a `.deb` package as `out/atom-{arch}.deb`.
* `--create-rpm-package`: creates an `.rpm` package as `out/atom-{arch}.rpm`.
* `--install`: installs the application in `/usr/local/`.
### Ubuntu / Debian
* `sudo apt-get install build-essential git libgnome-keyring-dev fakeroot`
* Install Node.js and npm:
* Install [nvm](https://github.com/creationix/nvm).
* Run `nvm install 4` to install Node 4.x.
* Run `npm install -g npm` to upgrade to the latest npm.
* You may need to install a newer C++ compiler with C++11 support if script/bootstrap has errors:
```sh
sudo add-apt-repository ppa:ubuntu-toolchain-r/test
sudo apt-get update
sudo apt-get install gcc-5 g++-5
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-5 80 --slave /usr/bin/g++ g++ /usr/bin/g++-5
sudo update-alternatives --config gcc # choose gcc-5 from the list
```
* Install GNOME headers and other basic prerequisites:
```sh
sudo apt-get install build-essential git libgnome-keyring-dev fakeroot rpm
```
* If `script/build` exits with an error, you may need to install a newer C++ compiler with C++11 support:
```sh
sudo add-apt-repository ppa:ubuntu-toolchain-r/test
sudo apt-get update
sudo apt-get install gcc-5 g++-5
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-5 80 --slave /usr/bin/g++ g++ /usr/bin/g++-5
sudo update-alternatives --config gcc # choose gcc-5 from the list
```
### Fedora / CentOS / RHEL
* `sudo dnf --assumeyes install make gcc gcc-c++ glibc-devel git-core libgnome-keyring-devel rpmdevtools`
* Instructions for [Node.js](https://nodejs.org/en/download/package-manager/#enterprise-linux-and-fedora).
### Arch
@ -49,79 +66,6 @@ Ubuntu LTS 12.04 64-bit is the recommended platform.
* `sudo zypper install nodejs nodejs-devel make gcc gcc-c++ glibc-devel git-core libgnome-keyring-devel rpmdevtools`
## Instructions
If you have problems with permissions, don't forget to prefix the commands with `sudo`
1. Clone the Atom repository:
```sh
git clone https://github.com/atom/atom
cd atom
```
2. Checkout the latest Atom release:
```sh
git fetch -p
git checkout $(git describe --tags `git rev-list --tags --max-count=1`)
```
3. Build Atom:
```sh
script/build
```
This will create the atom application at `out/Atom`.
4. Install the `atom` and `apm` commands to `/usr/local/bin` by executing:
```sh
sudo script/grunt install
```
To use the newly installed Atom, quit and restart all running Atom instances.
5. *Optionally*, you may generate distributable packages of Atom at `out`. Currently, `.deb` and `.rpm` package types are supported, as well as a `.tar.gz` archive. To create a `.deb` package run:
```sh
script/grunt mkdeb
```
To create a `.rpm` package run
```sh
script/grunt mkrpm
```
To create a `.tar.gz` archive run
```sh
script/grunt mktar
```
## Advanced Options
### Custom build directory
```sh
script/build --build-dir /build/atom/here
```
### Custom install directory
To install to a custom location from the standard build directory:
```sh
sudo script/grunt install --install-dir /install/atom/here
```
If you customized your build directory as described above:
```sh
sudo script/grunt install --build-dir /build/atom/here --install-dir /install/atom/here
```
## Troubleshooting
@ -150,15 +94,12 @@ See also [#2082](https://github.com/atom/atom/issues/2082).
### /usr/bin/env: node: No such file or directory
If you get this notice when attempting to `script/build`, you either do not
have Node.js installed, or node isn't identified as Node.js on your machine.
If it's the latter, entering `sudo ln -s /usr/bin/nodejs /usr/bin/node` into
your terminal may fix the issue.
#### You can also use Alternatives
On some variants (mostly Debian based distros) it's preferable for you to use
Alternatives so that changes to the binary paths can be fixed or altered easily:
If you get this notice when attempting to run any script, you either do not have
Node.js installed, or node isn't identified as Node.js on your machine. If it's
the latter, this might be caused by installing Node.js via the distro package
manager and not nvm, so entering `sudo ln -s /usr/bin/nodejs /usr/bin/node` into
your terminal may fix the issue. On some variants (mostly Debian based distros)
you can use `update-alternatives` too:
```sh
sudo update-alternatives --install /usr/bin/node node /usr/bin/nodejs 1 --slave /usr/bin/js js /usr/bin/nodejs
@ -176,9 +117,9 @@ you need to uninstall the system version of gyp.
On Fedora you would do the following:
```sh
sudo yum remove gyp
```
```sh
sudo yum remove gyp
```
### Linux build error reports in atom/atom
* Use [this search](https://github.com/atom/atom/search?q=label%3Abuild-error+label%3Alinux&type=Issues)

View File

@ -3,21 +3,25 @@
## Requirements
* macOS 10.8 or later
* [Node.js](https://nodejs.org/en/download/) (0.10.x or above)
* Node.js 4.4.x or later (we recommend installing it via [nvm](https://github.com/creationix/nvm))
* npm 3.10.x or later (run `npm install -g npm`)
* Command Line Tools for [Xcode](https://developer.apple.com/xcode/downloads/) (run `xcode-select --install` to install)
## Instructions
```sh
git clone https://github.com/atom/atom.git
cd atom
script/build # Creates application at /Applications/Atom.app
```
```sh
git clone https://github.com/atom/atom.git
cd atom
script/build
```
To also install the newly built application, use `script/build --install`.
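As a minimal sketch, once the requirements above are in place either of the following works, using the options described in the next section:

```sh
# Build and install the application into /Applications
script/build --install

# Or build a redistributable archive instead of installing
script/build --compress-artifacts   # produces out/atom-mac.zip
```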
### `script/build` Options
* `--install-dir` - The full path to the final built application (must include `.app` in the path), e.g. `script/build --install-dir /Users/username/full/path/to/Atom.app`
* `--build-dir` - Build the application in this directory.
* `--verbose` - Verbose mode. A lot more information output.
* `--code-sign`: signs the application with the GitHub certificate specified in `$ATOM_MAC_CODE_SIGNING_CERT_DOWNLOAD_URL`.
* `--compress-artifacts`: zips the generated application as `out/atom-mac.zip`.
* `--install`: installs the application at `/Applications/Atom.app` for dev and stable versions or at `/Applications/Atom-Beta.app` for beta versions.
## Troubleshooting

View File

@ -2,53 +2,39 @@
## Requirements
### General
* [Node.js](https://nodejs.org/en/download/) v4.x
* [Python](https://www.python.org/downloads/) v2.7.x
* The python.exe must be available at `%SystemDrive%\Python27\python.exe`.
If it is installed elsewhere, you can create a symbolic link to the
directory containing the python.exe using:
`mklink /d %SystemDrive%\Python27 D:\elsewhere\Python27`
* Node.js 4.4.x or later
* Python v2.7.x
* The python.exe must be available at `%SystemDrive%\Python27\python.exe`. If it is installed elsewhere, you can create a symbolic link to the directory containing the python.exe using: `mklink /d %SystemDrive%\Python27 D:\elsewhere\Python27`
* Visual Studio, either:
* [Visual C++ Build Tools 2015](http://landinghub.visualstudio.com/visual-cpp-build-tools)
* [Visual Studio 2013 Update 5](https://www.visualstudio.com/en-us/downloads/download-visual-studio-vs) (Express Edition or better)
* [Visual Studio 2015](https://www.visualstudio.com/en-us/downloads/download-visual-studio-vs) (Community Edition or better)
### Visual Studio
You can use either:
* [Visual C++ Build Tools 2015](http://landinghub.visualstudio.com/visual-cpp-build-tools)
* [Visual Studio 2013 Update 5](https://www.visualstudio.com/en-us/downloads/download-visual-studio-vs) (Express Edition or better)
* [Visual Studio 2015](https://www.visualstudio.com/en-us/downloads/download-visual-studio-vs) (Community Edition or better)
Whichever version you use, ensure that:
* The default installation folder is chosen so the build tools can find it
* If using Visual Studio make sure Visual C++ support is selected/installed
* If using Visual C++ Build Tools make sure Windows 8 SDK is selected/installed
* A `git` command is in your path
* Set the `GYP_MSVS_VERSION` environment variable to the Visual Studio/Build Tools version (`2013` or `2015`) e.g. ``[Environment]::SetEnvironmentVariable("GYP_MSVS_VERSION", "2015", "User")`` in PowerShell or set it in Windows advanced system settings control panel.
Whichever version you use, ensure that:
* The default installation folder is chosen so the build tools can find it
* If using Visual Studio make sure Visual C++ support is selected/installed
* If using Visual C++ Build Tools make sure Windows 8 SDK is selected/installed
* A `git` command is in your path
* Set the `GYP_MSVS_VERSION` environment variable to the Visual Studio/Build Tools version (`2013` or `2015`) e.g. ``[Environment]::SetEnvironmentVariable("GYP_MSVS_VERSION", "2015", "User")`` in PowerShell or set it in Windows advanced system settings control panel.
## Instructions
You can run these commands using Command Prompt, PowerShell or Git Shell via [GitHub Desktop](https://desktop.github.com/). These instructions will assume the use of Bash from Git Shell - if you are using Command Prompt use a backslash instead: i.e. `script\build`.
You can run these commands using Command Prompt, PowerShell or Git Shell via [GitHub Desktop](https://desktop.github.com/). These instructions will assume the use of Command Prompt.
```bash
```
cd C:\
git clone https://github.com/atom/atom/
git clone https://github.com/atom/atom.git
cd atom
script\build
```
This will create the Atom application in the `out\Atom` folder as well as copy it to a subfolder of your user profile (e.g. `c:\Users\Bob`) called `AppData\Local\atom\app-dev`.
To also install the newly built application, use `script\build --create-windows-installer` and launch the generated installers.
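As a sketch, assuming the repository was cloned to `C:\atom` as shown above (installer file names may vary):

```
cd C:\atom
script\build --create-windows-installer
rem The .exe, .msi and .nupkg installers are generated in the out\ directory;
rem run the generated .exe or .msi from there to install Atom.
dir out\*.exe out\*.msi
```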
### `script\build` Options
* `--install-dir` - Creates the final built application in this directory. Example (trailing slash is optional):
```bash
.\script\build --install-dir Z:\Some\Destination\Directory\
```
* `--build-dir` - Build the application in this directory. Example (trailing slash is optional):
```bash
.\script\build --build-dir Z:\Some\Temporary\Directory\
```
* `--no-install` - Skips the installation task after building.
* `--verbose` - Verbose mode. A lot more information output.
* `--code-sign`: signs the application with the GitHub certificate specified in `$WIN_P12KEY_URL`.
* `--compress-artifacts`: zips the generated application as `out/atom-windows.zip` (requires 7-zip).
* `--create-windows-installer`: creates an `.msi`, an `.exe` and a `.nupkg` installer in the `out/` directory.
* `--install`: installs the application in `%LOCALAPPDATA%\Atom\app-dev\`.
## Do I have to use GitHub Desktop?
@ -70,7 +56,7 @@ If none of this works, do install Github Desktop and use its Git Shell as it mak
* `msbuild.exe failed with exit code: 1`
* Ensure you have Visual C++ support installed. Go into Add/Remove Programs, select Visual Studio and press Modify and then check the Visual C++ box.
* `script\build` stops with no error or warning shortly after displaying the versions of node, npm and Python
* `script\build` stop with no error or warning shortly after displaying the versions of node, npm and Python
* Make sure that the path where you have checked out Atom does not include a space. e.g. use `c:\atom` and not `c:\my stuff\atom`
* `script\build` outputs only the Node.js and Python versions before returning
@ -94,11 +80,11 @@ If none of this works, do install Github Desktop and use its Git Shell as it mak
* Try setting the `GYP_MSVS_VERSION` environment variable to 2013 or 2015 depending on what version of Visual Studio you are running and then `script\clean` followed by `script\build` (re-open your command prompt or Powershell window if you set it using the GUI)
* `'node-gyp' is not recognized as an internal or external command, operable program or batch file.`
* Try running `npm install -g node-gyp`, and run `script/build` again.
* Try running `npm install -g node-gyp`, and run `script\build` again.
* Other `node-gyp` errors on first build attempt, even though the right Node.js and Python versions are installed.
* Try the build command one more time; experience shows it often works on the second try in many of these cases.
### Windows build error reports in atom/atom
* If all fails, use [this search](https://github.com/atom/atom/search?q=label%3Abuild-error+label%3Awindows&type=Issues) to get a list of reports about build errors on Windows, and see if yours has already been reported.
* If it hasn't, please open a new issue with your Windows version, architecture (x86 or amd64), and a screenshot of your build output, including the Node.js and Python versions.
* If it hasn't, please open a new issue with your Windows version, architecture (x86 or amd64), and a screenshot of your build output, including the Node.js and Python versions.

View File

@ -237,10 +237,10 @@
]
'atom-pane': [
{type: 'separator'}
{label: 'Split Up', command: 'pane:split-up'}
{label: 'Split Down', command: 'pane:split-down'}
{label: 'Split Left', command: 'pane:split-left'}
{label: 'Split Right', command: 'pane:split-right'}
{label: 'Split Up', command: 'pane:split-up-and-copy-active-item'}
{label: 'Split Down', command: 'pane:split-down-and-copy-active-item'}
{label: 'Split Left', command: 'pane:split-left-and-copy-active-item'}
{label: 'Split Right', command: 'pane:split-right-and-copy-active-item'}
{label: 'Close Pane', command: 'pane:close'}
{type: 'separator'}
]

View File

@ -213,10 +213,10 @@
]
'atom-pane': [
{type: 'separator'}
{label: 'Split Up', command: 'pane:split-up'}
{label: 'Split Down', command: 'pane:split-down'}
{label: 'Split Left', command: 'pane:split-left'}
{label: 'Split Right', command: 'pane:split-right'}
{label: 'Split Up', command: 'pane:split-up-and-copy-active-item'}
{label: 'Split Down', command: 'pane:split-down-and-copy-active-item'}
{label: 'Split Left', command: 'pane:split-left-and-copy-active-item'}
{label: 'Split Right', command: 'pane:split-right-and-copy-active-item'}
{label: 'Close Pane', command: 'pane:close'}
{type: 'separator'}
]

View File

@ -216,10 +216,10 @@
]
'atom-pane': [
{type: 'separator'}
{label: 'Split Up', command: 'pane:split-up'}
{label: 'Split Down', command: 'pane:split-down'}
{label: 'Split Left', command: 'pane:split-left'}
{label: 'Split Right', command: 'pane:split-right'}
{label: 'Split Up', command: 'pane:split-up-and-copy-active-item'}
{label: 'Split Down', command: 'pane:split-down-and-copy-active-item'}
{label: 'Split Left', command: 'pane:split-left-and-copy-active-item'}
{label: 'Split Right', command: 'pane:split-right-and-copy-active-item'}
{label: 'Close Pane', command: 'pane:close'}
{type: 'separator'}
]

View File

@ -1,7 +1,7 @@
{
"name": "atom",
"productName": "Atom",
"version": "1.11.0-dev",
"version": "1.12.0-dev",
"description": "A hackable text editor for the 21st Century.",
"main": "./src/main-process/main.js",
"repository": {
@ -17,7 +17,7 @@
"async": "0.2.6",
"atom-keymap": "6.3.2",
"atom-ui": "0.4.1",
"babel-core": "^5.8.21",
"babel-core": "5.8.38",
"cached-run-in-this-context": "0.4.1",
"chai": "3.5.0",
"clear-cut": "^2.0.1",
@ -39,7 +39,7 @@
"key-path-helpers": "^0.4.0",
"less-cache": "0.23",
"line-top-index": "0.2.0",
"marked": "^0.3.5",
"marked": "^0.3.6",
"mocha": "2.5.1",
"normalize-package-data": "^2.0.0",
"nslog": "^3",
@ -58,7 +58,7 @@
"sinon": "1.17.4",
"source-map-support": "^0.3.2",
"temp": "0.8.1",
"text-buffer": "9.2.10",
"text-buffer": "9.2.12",
"typescript-simple": "1.0.0",
"underscore-plus": "^1.6.6",
"winreg": "^1.2.1",
@ -71,8 +71,8 @@
"atom-light-ui": "0.44.0",
"base16-tomorrow-dark-theme": "1.2.0",
"base16-tomorrow-light-theme": "1.2.0",
"one-dark-ui": "1.5.0",
"one-light-ui": "1.5.0",
"one-dark-ui": "1.6.0",
"one-light-ui": "1.6.0",
"one-dark-syntax": "1.3.0",
"one-light-syntax": "1.3.0",
"solarized-dark-syntax": "1.0.2",
@ -99,7 +99,7 @@
"git-diff": "1.1.0",
"go-to-line": "0.31.0",
"grammar-selector": "0.48.2",
"image-view": "0.58.3",
"image-view": "0.59.0",
"incompatible-packages": "0.26.1",
"keybinding-resolver": "0.35.0",
"line-ending-selector": "0.5.0",
@ -117,7 +117,7 @@
"symbols-view": "0.113.1",
"tabs": "0.101.0",
"timecop": "0.33.2",
"tree-view": "0.209.2",
"tree-view": "0.209.3",
"update-package-dependencies": "0.10.0",
"welcome": "0.35.1",
"whitespace": "0.33.0",

View File

@ -22,10 +22,24 @@ chmod 755 "%{buildroot}/<%= installDir %>/bin/<%= appFileName %>"
mkdir -p "%{buildroot}/<%= installDir %>/share/applications/"
cp "<%= appFileName %>.desktop" "%{buildroot}/<%= installDir %>/share/applications/"
for i in 1024 512 256 128 64 48 32 24 16; do
mkdir -p "%{buildroot}/<%= installDir %>/share/icons/hicolor/${i}x${i}/apps"
cp "icons/${i}.png" "%{buildroot}/<%= installDir %>/share/icons/hicolor/${i}x${i}/apps/<%= appFileName %>.png"
done
mkdir -p "%{buildroot}/<%= installDir %>/share/icons/hicolor/1024x1024/apps"
cp "icons/1024.png" "%{buildroot}/<%= installDir %>/share/icons/hicolor/1024x1024/apps/<%= appFileName %>.png"
mkdir -p "%{buildroot}/<%= installDir %>/share/icons/hicolor/512x512/apps"
cp "icons/512.png" "%{buildroot}/<%= installDir %>/share/icons/hicolor/512x512/apps/<%= appFileName %>.png"
mkdir -p "%{buildroot}/<%= installDir %>/share/icons/hicolor/256x256/apps"
cp "icons/256.png" "%{buildroot}/<%= installDir %>/share/icons/hicolor/256x256/apps/<%= appFileName %>.png"
mkdir -p "%{buildroot}/<%= installDir %>/share/icons/hicolor/128x128/apps"
cp "icons/128.png" "%{buildroot}/<%= installDir %>/share/icons/hicolor/128x128/apps/<%= appFileName %>.png"
mkdir -p "%{buildroot}/<%= installDir %>/share/icons/hicolor/64x64/apps"
cp "icons/64.png" "%{buildroot}/<%= installDir %>/share/icons/hicolor/64x64/apps/<%= appFileName %>.png"
mkdir -p "%{buildroot}/<%= installDir %>/share/icons/hicolor/48x48/apps"
cp "icons/48.png" "%{buildroot}/<%= installDir %>/share/icons/hicolor/48x48/apps/<%= appFileName %>.png"
mkdir -p "%{buildroot}/<%= installDir %>/share/icons/hicolor/32x32/apps"
cp "icons/32.png" "%{buildroot}/<%= installDir %>/share/icons/hicolor/32x32/apps/<%= appFileName %>.png"
mkdir -p "%{buildroot}/<%= installDir %>/share/icons/hicolor/24x24/apps"
cp "icons/24.png" "%{buildroot}/<%= installDir %>/share/icons/hicolor/24x24/apps/<%= appFileName %>.png"
mkdir -p "%{buildroot}/<%= installDir %>/share/icons/hicolor/16x16/apps"
cp "icons/16.png" "%{buildroot}/<%= installDir %>/share/icons/hicolor/16x16/apps/<%= appFileName %>.png"
%files
<%= installDir %>/bin/<%= appFileName %>

View File

@ -34,8 +34,6 @@
<string>AtomApplication</string>
<key>NSSupportsAutomaticGraphicsSwitching</key>
<true/>
<key>SUPublicDSAKeyFile</key>
<string>speakeasy.pem</string>
<key>SUScheduledCheckInterval</key>
<string>3600</string>
<key>CFBundleURLTypes</key>

View File

@ -1,20 +0,0 @@
-----BEGIN PUBLIC KEY-----
MIIDOjCCAi0GByqGSM44BAEwggIgAoIBAQCEh+j0nKWTw7soK6w3uk9PzPGVBksk
wDIaA+d+1CHJY9qhjp7OjAlSOl6nrUlGHzU87DRmBlwYZONAzDZnYpLi7zmPVASg
Xk+AmuqzqahTKtwodJp7R/Aq/lCbB2tXTXOxVo+Jya1BQbfd0wWXJFUlD/xTvrgu
zrtw6VYBvaRu8jCjHAJNZn0CO80igj1ZNxRqmmz1Rkt1tT0KBBfGBTNzXeBmGKHN
bVIKW7zImgfm+UQky+WFei1dqcfWOyfrHIYa3Qn1Nes48SBdrolvfvrChlSpqgEN
wxFW9aoognS1UJTu350AQb2NwOOSQRsR++y3iJp+60nBSDZu7sjNN9etAhUAvqki
JOjBjooRd2odMh7imICHQ3kCggEATwa6W0s2xrolPRpwWZS8ORUNDgEI4eOIvonq
O2qZgwD21zUQOsFjLMbWn0cCtrORr7iM8pFg8Yn8dSccpqs+2cM4uFZAycKXf6w3
jIvV6M3IPQuUSqVFZtqUVuteGTEuAHZKIrXE05P4aJXHLjqSC9JuaXNRm9q7OW7m
rwsoAFyfkKqbtl5Ch+WZ21CE4J+ByTfVwVU4XLiOtce6NABSDWNJsF9fIoFCZCDc
uumLllDJysD8S6aBNhOjNMHPmeIpZBXT23zHH5du/blcEyBbVF3a2ntgudfJmyln
T178CIEUSSjcbz9JyAhhK7OfNlzKhRiO1c4Y3XaZIniLGjF5DwOCAQUAAoIBABGZ
mfuHBW89ub19iICE//VbB91m2f0nUvHk8vE4vvAK8AdD91GODPJr4DU0kJM6ne8r
ohvZgokgDRkGAEceX/nVoG0RLq9T15Xr2qedWVwAffpU10iV9mYwbhHqUKPtG8cj
GW0cDdSI+0oG6UEyn8aQ5p93YEm5N6lq4rWKpxXb/gkrIla4sJJP8VHOOKmo6l1H
AKVIfofiaNAQShu72WVCCurWaoVTUEliEBhy3WlcjuKXEuoL1lpNxyqkt7mf6w71
6y2+Nh+XUTiFoTIVhk/CH0z+BQTneWEALvfTFzDae+a42rPAisKlt+Gbe7zopnVA
kcQwM0lLzgwx4T1DV3s=
-----END PUBLIC KEY-----

View File

@ -1,147 +1,30 @@
#!/usr/bin/env node
var fs = require('fs');
var verifyRequirements = require('./utils/verify-requirements');
var safeExec = require('./utils/child-process-wrapper.js').safeExec;
var path = require('path');
'use strict'
var t0, t1
const cleanDependencies = require('./lib/clean-dependencies')
const deleteMsbuildFromPath = require('./lib/delete-msbuild-from-path')
const dependenciesFingerprint = require('./lib/dependencies-fingerprint')
const installApm = require('./lib/install-apm')
const installAtomDependencies = require('./lib/install-atom-dependencies')
const installScriptDependencies = require('./lib/install-script-dependencies')
const verifyMachineRequirements = require('./lib/verify-machine-requirements')
// Executes an array of commands one by one.
function executeCommands(commands, done, index) {
if (index != undefined) {
t1 = Date.now()
console.log("=> Took " + (t1 - t0) + "ms.");
console.log();
}
process.on('unhandledRejection', function (e) {
console.error(e.stack || e)
process.exit(1)
})
index = (index == undefined ? 0 : index);
if (index < commands.length) {
var command = commands[index];
if (command.message)
console.log(command.message);
var options = null;
if (typeof command !== 'string') {
options = command.options;
command = command.command;
}
t0 = Date.now()
safeExec(command, options, executeCommands.bind(this, commands, done, index + 1));
}
else
done(null);
verifyMachineRequirements()
if (dependenciesFingerprint.isOutdated()) {
cleanDependencies()
}
function bootstrap() {
var apmInstallPath = path.resolve(__dirname, '..', 'apm');
if (!fs.existsSync(apmInstallPath))
fs.mkdirSync(apmInstallPath);
if (!fs.existsSync(path.join(apmInstallPath, 'node_modules')))
fs.mkdirSync(path.join(apmInstallPath, 'node_modules'));
if (process.platform === 'win32') deleteMsbuildFromPath()
var apmPath = path.resolve(__dirname, '..', 'apm', 'node_modules', 'atom-package-manager', 'bin', 'apm')
var apmFlags = process.env.JANKY_SHA1 || process.argv.indexOf('--no-color') !== -1 ? ' --no-color' : '';
installScriptDependencies()
installApm()
installAtomDependencies()
var npmPath = path.resolve(__dirname, '..', 'build', 'node_modules', '.bin', 'npm');
var initialNpmCommand = fs.existsSync(npmPath) ? npmPath : 'npm';
var npmFlags = ' --userconfig=' + path.resolve(__dirname, '..', 'build', '.npmrc') + ' ';
var packagesToDedupe = [
'abbrev',
'amdefine',
'atom-space-pen-views',
'cheerio',
'domelementtype',
'fs-plus',
'grim',
'highlights',
'humanize-plus',
'iconv-lite',
'inherits',
'loophole',
'oniguruma',
'q',
'request',
'rimraf',
'roaster',
'season',
'sigmund',
'semver',
'through',
'temp'
];
process.env.ATOM_RESOURCE_PATH = path.resolve(__dirname, '..');
var buildInstallCommand = initialNpmCommand + npmFlags + 'install';
var buildInstallOptions = {cwd: path.resolve(__dirname, '..', 'build')};
var apmInstallCommand = npmPath + npmFlags + '--target=4.4.5 --global-style ' + 'install';
var apmInstallOptions = {cwd: apmInstallPath};
var moduleInstallCommand = apmPath + ' install' + apmFlags;
var dedupeApmCommand = apmPath + ' dedupe' + apmFlags;
var moduleInstallEnv = {};
for (var e in process.env) {
moduleInstallEnv[e] = process.env[e];
}
// Set our target (Electron) version so that node-pre-gyp can download the
// proper binaries.
var electronVersion = require('../package.json').electronVersion;
moduleInstallEnv.npm_config_target = electronVersion;
// Force 32-bit modules on Windows.
// /cc https://github.com/atom/atom/issues/10450
if (process.platform === 'win32') {
moduleInstallEnv.npm_config_target_arch = 'ia32';
}
var moduleInstallOptions = {env: moduleInstallEnv};
if (process.argv.indexOf('--no-quiet') === -1 && !process.env.ATOM_NOISY_BUILD) {
buildInstallCommand += ' --loglevel error';
apmInstallCommand += ' --loglevel error';
moduleInstallCommand += ' --loglevel error';
dedupeApmCommand += ' --quiet';
buildInstallOptions.ignoreStdout = true;
apmInstallOptions.ignoreStdout = true;
}
// apm ships with 32-bit node so make sure its native modules are compiled
// for a 32-bit target architecture
if (process.env.JANKY_SHA1 && process.platform === 'win32')
apmInstallCommand += ' --arch=ia32';
var commands = [
{
command: buildInstallCommand,
message: 'Installing build modules...',
options: buildInstallOptions
},
{
command: apmInstallCommand,
message: 'Installing apm...',
options: apmInstallOptions
},
{
command: moduleInstallCommand,
options: moduleInstallOptions
},
{
command: dedupeApmCommand + ' ' + packagesToDedupe.join(' '),
options: moduleInstallOptions
}
];
process.chdir(path.dirname(__dirname));
executeCommands(commands, process.exit);
}
verifyRequirements(function(error, successMessage) {
if (error) {
console.log(error);
process.exit(1);
}
console.log(successMessage);
bootstrap();
});
dependenciesFingerprint.write()

View File

@ -1,27 +1,103 @@
#!/usr/bin/env node
var cp = require('./utils/child-process-wrapper.js');
var runGrunt = require('./utils/run-grunt.js');
var path = require('path');
var fs = require('fs');
process.chdir(path.dirname(__dirname));
'use strict'
if (process.platform === 'win32') {
process.env['PATH'] = process.env['PATH']
.split(';')
.filter(function(p) {
if (fs.existsSync(path.resolve(p, 'msbuild.exe'))) {
console.log('Excluding "' + p + '" from PATH to avoid msbuild.exe mismatch')
return false;
// Run bootstrap first to ensure all the dependencies used later in this script
// are installed.
require('./bootstrap')
// Needed so we can require src/module-cache.coffee during generateModuleCache
require('coffee-script/register')
require('colors')
const argv = require('yargs')
.usage('Usage: $0 [options]')
.help('help')
.describe('code-sign', 'Code-sign executables (macOS and Windows only)')
.describe('create-windows-installer', 'Create installer (Windows only)')
.describe('create-debian-package', 'Create .deb package (Linux only)')
.describe('create-rpm-package', 'Create .rpm package (Linux only)')
.describe('compress-artifacts', 'Compress Atom binaries (and symbols on macOS)')
.describe('install', 'Install Atom')
.argv
const cleanOutputDirectory = require('./lib/clean-output-directory')
const codeSignOnMac = require('./lib/code-sign-on-mac')
const compressArtifacts = require('./lib/compress-artifacts')
const copyAssets = require('./lib/copy-assets')
const createDebianPackage = require('./lib/create-debian-package')
const createRpmPackage = require('./lib/create-rpm-package')
const createWindowsInstaller = require('./lib/create-windows-installer')
const downloadChromedriver = require('./lib/download-chromedriver')
const dumpSymbols = require('./lib/dump-symbols')
const generateAPIDocs = require('./lib/generate-api-docs')
const generateMetadata = require('./lib/generate-metadata')
const generateModuleCache = require('./lib/generate-module-cache')
const installApplication = require('./lib/install-application')
const packageApplication = require('./lib/package-application')
const prebuildLessCache = require('./lib/prebuild-less-cache')
const transpileBabelPaths = require('./lib/transpile-babel-paths')
const transpileCoffeeScriptPaths = require('./lib/transpile-coffee-script-paths')
const transpileCsonPaths = require('./lib/transpile-cson-paths')
const transpilePegJsPaths = require('./lib/transpile-peg-js-paths')
process.on('unhandledRejection', function (e) {
console.error(e.stack || e)
process.exit(1)
})
cleanOutputDirectory()
copyAssets()
transpileBabelPaths()
transpileCoffeeScriptPaths()
transpileCsonPaths()
transpilePegJsPaths()
generateModuleCache()
prebuildLessCache()
generateMetadata()
generateAPIDocs()
downloadChromedriver()
dumpSymbols()
.then(packageApplication)
.then(packagedAppPath => {
if (process.platform === 'darwin') {
if (argv.codeSign) {
codeSignOnMac(packagedAppPath)
} else {
return true;
console.log('Skipping code-signing. Specify the --code-sign option to perform code-signing'.gray)
}
} else if (process.platform === 'win32') {
if (argv.createWindowsInstaller) {
return createWindowsInstaller(packagedAppPath, argv.codeSign).then(() => packagedAppPath)
}
else {
console.log('Skipping creating installer. Specify the --create-windows-installer option to create a Squirrel-based Windows installer. Code-signing was skipped too.'.gray)
}
} else if (process.platform === 'linux') {
if (argv.createDebianPackage) {
createDebianPackage(packagedAppPath)
} else {
console.log('Skipping creating debian package. Specify the --create-debian-package option to create it.'.gray)
}
})
.join(';');
}
cp.safeExec('node script/bootstrap', function() {
// build/node_modules/.bin/grunt "$@"
var args = process.argv.slice(2);
runGrunt(args, process.exit);
});
if (argv.createRpmPackage) {
createRpmPackage(packagedAppPath)
} else {
console.log('Skipping creating rpm package. Specify the --create-rpm-package option to create it.'.gray)
}
}
return Promise.resolve(packagedAppPath)
}).then(packagedAppPath => {
if (argv.compressArtifacts) {
compressArtifacts(packagedAppPath)
} else {
console.log('Skipping artifacts compression. Specify the --compress-artifacts option to compress Atom binaries (and symbols on macOS)'.gray)
}
if (argv.install) {
installApplication(packagedAppPath)
} else {
console.log('Skipping installation. Specify the --install option to install Atom'.gray)
}
})

View File

@ -1,116 +1,2 @@
#!/usr/bin/env node
var cp = require('./utils/child-process-wrapper.js');
var crypto = require('crypto')
var fingerprint = require('./utils/fingerprint')
var fs = require('fs');
var path = require('path');
process.chdir(path.dirname(__dirname));
var homeDir = process.platform == 'win32' ? process.env.USERPROFILE : process.env.HOME;
function loadEnvironmentVariables(filePath) {
try {
var lines = fs.readFileSync(filePath, 'utf8').trim().split('\n');
for (var i in lines) {
var parts = lines[i].split('=');
var key = parts[0].trim();
var value = parts[1].trim().substr(1, parts[1].length - 2);
process.env[key] = value;
}
} catch(error) {
console.error("Failed to load environment variables: " + filePath, error.code);
}
}
function readEnvironmentVariables() {
if (process.env.JANKY_SHA1) {
if (process.platform === 'win32') {
loadEnvironmentVariables(path.resolve('/jenkins/config/atomcredentials'));
} else if (process.platform === 'darwin') {
loadEnvironmentVariables('/var/lib/jenkins/config/atomcredentials');
loadEnvironmentVariables('/var/lib/jenkins/config/xcodekeychain');
}
}
}
function setEnvironmentVariables() {
if (process.platform === 'linux') {
// Use Clang for building native code, the GCC on Precise is too old.
process.env.CC = 'clang';
process.env.CXX = 'clang++';
process.env.npm_config_clang = '1';
}
}
function removeNodeModules() {
if (fingerprint.fingerprintMatches()) {
console.log('node_modules matches current fingerprint ' + fingerprint.fingerprint() + ' - not removing')
return
}
var fsPlus;
try {
fsPlus = require('fs-plus');
} catch (error) {
return;
}
try {
fsPlus.removeSync(path.resolve(__dirname, '..', 'node_modules'));
fsPlus.removeSync(path.resolve(__dirname, '..', 'apm', 'node_modules'));
} catch (error) {
console.error(error.message);
process.exit(1);
}
}
function removeTempFolders() {
var fsPlus;
try {
fsPlus = require('fs-plus');
} catch (error) {
return;
}
var temp = require('os').tmpdir();
if (!fsPlus.isDirectorySync(temp))
return;
var deletedFolders = 0;
fsPlus.readdirSync(temp).filter(function(folderName) {
return folderName.indexOf('npm-') === 0;
}).forEach(function(folderName) {
try {
fsPlus.removeSync(path.join(temp, folderName));
deletedFolders++;
} catch (error) {
console.error("Failed to delete npm temp folder: " + error.message);
}
});
if (deletedFolders > 0)
console.log("Deleted " + deletedFolders + " npm folders from temp directory");
}
readEnvironmentVariables();
setEnvironmentVariables();
removeNodeModules();
removeTempFolders();
cp.safeExec.bind(global, 'npm install npm --loglevel error', {cwd: path.resolve(__dirname, '..', 'build')}, function() {
cp.safeExec.bind(global, 'node script/bootstrap', function(error) {
if (error)
process.exit(1);
require('fs-plus').removeSync.bind(global, path.join(homeDir, '.atom'))
var async = require('async');
var gruntPath = path.join('build', 'node_modules', '.bin', 'grunt') + (process.platform === 'win32' ? '.cmd' : '');
var tasks = [
cp.safeExec.bind(global, 'git clean -dff -e node_modules'), // If we left them behind in removeNodeModules() they are OK to use
cp.safeExec.bind(global, gruntPath + ' ci --gruntfile build/Gruntfile.coffee --stack --no-color')
]
async.series(tasks, function(error) {
process.exit(error ? 1 : 0);
});
})();
})();
echo "Builds for this version of Atom no longer run on Janky."
echo "See https://github.com/atom/atom/pull/12410 for more information."

View File

@ -1,17 +0,0 @@
#!/bin/bash
set -e
export ATOM_ACCESS_TOKEN=$BUILD_ATOM_LINUX_ACCESS_TOKEN
export BUILD_ATOM_RELEASES_S3_KEY=$BUILD_ATOM_LINUX_RELEASES_S3_KEY
export BUILD_ATOM_RELEASES_S3_SECRET=$BUILD_ATOM_LINUX_RELEASES_S3_SECRET
export BUILD_ATOM_RELEASES_S3_BUCKET=$BUILD_ATOM_LINUX_RELEASES_S3_BUCKET
rm -rf /tmp/.atom-nvm
git clone https://github.com/creationix/nvm.git /tmp/.atom-nvm
source /tmp/.atom-nvm/nvm.sh
nvm install 4.4.7
nvm use 4.4.7
npm install -g npm
script/cibuild

View File

@ -1,15 +0,0 @@
#!/bin/bash
set -e
docker build -t atom-rpm .
docker run \
--rm \
--env JANKY_SHA1="$JANKY_SHA1" \
--env JANKY_BRANCH="$JANKY_BRANCH" \
--env ATOM_ACCESS_TOKEN="$BUILD_ATOM_RPM_ACCESS_TOKEN" \
--env BUILD_ATOM_RELEASES_S3_KEY="$BUILD_ATOM_RPM_RELEASES_S3_KEY" \
--env BUILD_ATOM_RELEASES_S3_SECRET="$BUILD_ATOM_RPM_RELEASES_S3_SECRET" \
--env BUILD_ATOM_RELEASES_S3_BUCKET="$BUILD_ATOM_RPM_RELEASES_S3_BUCKET" \
atom-rpm /atom/script/rpmbuild
docker rmi atom-rpm

View File

@ -1,5 +0,0 @@
@IF EXIST "%~dp0\node.exe" (
"%~dp0\node.exe" "%~dp0\cibuild" %*
) ELSE (
node "%~dp0\cibuild" %*
)

View File

@ -1,62 +1,13 @@
#!/usr/bin/env node
var childProcess = require('./utils/child-process-wrapper.js');
var fs = require('fs');
var path = require('path');
var os = require('os');
var isWindows = process.platform === 'win32';
var productName = require('../package.json').productName;
'use strict'
process.chdir(path.dirname(__dirname));
var home = process.env[isWindows ? 'USERPROFILE' : 'HOME'];
var tmpdir = os.tmpdir();
const cleanCaches = require('./lib/clean-caches')
const cleanDependencies = require('./lib/clean-dependencies')
const cleanOutputDirectory = require('./lib/clean-output-directory')
const killRunningAtomInstances = require('./lib/kill-running-atom-instances')
// Windows: Use START as a way to ignore the error if Atom.exe isn't running
childProcess.safeExec(isWindows ? `START taskkill /F /IM ${productName}.exe` : `pkill -9 ${productName} || true`);
var pathsToRemove = [
[__dirname, '..', 'node_modules'],
[__dirname, '..', 'build', 'node_modules'],
[__dirname, '..', 'apm', 'node_modules'],
[__dirname, '..', 'atom-shell'],
[__dirname, '..', 'electron'],
[__dirname, '..', 'out'],
[home, '.atom', '.node-gyp'],
[home, '.atom', 'storage'],
[home, '.atom', '.apm'],
[home, '.atom', '.npm'],
[home, '.atom', 'compile-cache'],
[home, '.atom', 'atom-shell'],
[home, '.atom', 'electron'],
[tmpdir, 'atom-build'],
[tmpdir, 'atom-cached-atom-shells'],
].map(function(pathSegments) {
return path.resolve.apply(null, pathSegments);
});
pathsToRemove.forEach(function(pathToRemove) {
if (fs.existsSync(pathToRemove)) {
removePath(pathToRemove);
}
});
function removePath(pathToRemove) {
if (isWindows) {
removePathOnWindows(pathToRemove);
} else {
childProcess.safeExec('rm -rf ' + pathToRemove);
}
}
// Windows has a 260-char path limit for rmdir etc. Just recursively delete in Node.
function removePathOnWindows(folderPath) {
fs.readdirSync(folderPath).forEach(function(entry, index) {
var entryPath = path.join(folderPath, entry);
if (fs.lstatSync(entryPath).isDirectory()) {
removePathOnWindows(entryPath);
} else {
fs.unlinkSync(entryPath);
}
});
fs.rmdirSync(folderPath);
};
killRunningAtomInstances()
cleanDependencies()
cleanCaches()
cleanOutputDirectory()

51
script/config.js Normal file
View File

@ -0,0 +1,51 @@
// This module exports paths, names, and other metadata that is referenced
// throughout the build.
'use strict'
const fs = require('fs')
const path = require('path')
const repositoryRootPath = path.resolve(__dirname, '..')
const apmRootPath = path.join(repositoryRootPath, 'apm')
const scriptRootPath = path.join(repositoryRootPath, 'script')
const buildOutputPath = path.join(repositoryRootPath, 'out')
const docsOutputPath = path.join(repositoryRootPath, 'docs', 'output')
const intermediateAppPath = path.join(buildOutputPath, 'app')
const symbolsPath = path.join(buildOutputPath, 'symbols')
const electronDownloadPath = path.join(repositoryRootPath, 'electron')
const homeDirPath = process.env.HOME || process.env.USERPROFILE
const atomHomeDirPath = path.join(homeDirPath, '.atom')
const appMetadata = require(path.join(repositoryRootPath, 'package.json'))
const apmMetadata = require(path.join(apmRootPath, 'package.json'))
const channel = getChannel()
module.exports = {
appMetadata, apmMetadata, channel,
repositoryRootPath, apmRootPath, scriptRootPath,
buildOutputPath, docsOutputPath, intermediateAppPath, symbolsPath,
electronDownloadPath, atomHomeDirPath, homeDirPath,
getApmBinPath, getNpmBinPath
}
function getChannel () {
if (appMetadata.version.match(/dev/)) {
return 'dev'
} else if (appMetadata.version.match(/beta/)) {
return 'beta'
} else {
return 'stable'
}
}
function getApmBinPath () {
const apmBinName = process.platform === 'win32' ? 'apm.cmd' : 'apm'
return path.join(apmRootPath, 'node_modules', 'atom-package-manager', 'bin', apmBinName)
}
function getNpmBinPath () {
const npmBinName = process.platform === 'win32' ? 'npm.cmd' : 'npm'
const localNpmBinPath = path.resolve(repositoryRootPath, 'script', 'node_modules', '.bin', npmBinName)
return fs.existsSync(localNpmBinPath) ? localNpmBinPath : npmBinName
}
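
Note: a minimal sketch of how the other build scripts are expected to consume this module. The file name and log lines below are illustrative, not part of the commit; only the CONFIG properties and helpers defined in config.js above are assumed.
// Hypothetical script/lib/print-build-config.js
'use strict'
const CONFIG = require('../config')

module.exports = function () {
  // intermediateAppPath is <repo>/out/app, derived from buildOutputPath above
  console.log(`Staging app at ${CONFIG.intermediateAppPath}`)
  // channel is 'dev', 'beta' or 'stable', based on the version in package.json
  console.log(`Release channel: ${CONFIG.channel}`)
  // getNpmBinPath() falls back to a globally installed npm until
  // script/node_modules/.bin has been populated by bootstrap
  console.log(`Using npm at ${CONFIG.getNpmBinPath()}`)
  console.log(`Using apm at ${CONFIG.getApmBinPath()}`)
}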

View File

@ -1,29 +0,0 @@
#!/usr/bin/env node
var fingerprint = require('./utils/fingerprint')
var fs = require('fs')
var path = require('path')
if (!fs.existsSync(path.resolve(__dirname, '..', 'node_modules', '.atom-ci-fingerprint'))) {
return
}
if (fingerprint.fingerprintMatches()) {
console.log('node_modules matches current fingerprint ' + fingerprint.fingerprint() + ' - not removing')
return
}
var fsPlus
try {
fsPlus = require('fs-plus')
} catch (error) {
console.log(error.message)
return
}
try {
fsPlus.removeSync(path.resolve(__dirname, '..', 'node_modules'))
fsPlus.removeSync(path.resolve(__dirname, '..', 'apm', 'node_modules'))
} catch (error) {
console.error(error.message)
process.exit(1)
}

View File

@ -1,2 +0,0 @@
#!/usr/bin/env node
require('./utils/fingerprint').writeFingerprint()

View File

@ -1,6 +0,0 @@
#!/usr/bin/env node
var runGrunt = require('./utils/run-grunt.js');
// build/node_modules/.bin/grunt "$@"
var args = process.argv.slice(2);
runGrunt(args, process.exit);

View File

@ -1,5 +0,0 @@
@IF EXIST "%~dp0\node.exe" (
"%~dp0\node.exe" "%~dp0\grunt" %*
) ELSE (
node "%~dp0\grunt" %*
)

View File

@ -0,0 +1,27 @@
'use strict'
const fs = require('fs-extra')
const os = require('os')
const path = require('path')
const CONFIG = require('../config')
module.exports = function () {
const cachePaths = [
path.join(CONFIG.repositoryRootPath, 'electron'),
path.join(CONFIG.homeDirPath, '.atom', '.node-gyp'),
path.join(CONFIG.homeDirPath, '.atom', 'storage'),
path.join(CONFIG.homeDirPath, '.atom', '.apm'),
path.join(CONFIG.homeDirPath, '.atom', '.npm'),
path.join(CONFIG.homeDirPath, '.atom', 'compile-cache'),
path.join(CONFIG.homeDirPath, '.atom', 'atom-shell'),
path.join(CONFIG.homeDirPath, '.atom', 'electron'),
path.join(os.tmpdir(), 'atom-build'),
path.join(os.tmpdir(), 'atom-cached-atom-shells')
]
for (let path of cachePaths) {
console.log(`Cleaning ${path}`)
fs.removeSync(path)
}
}

View File

@ -0,0 +1,22 @@
const path = require('path')
const CONFIG = require('../config')
module.exports = function () {
// We can't require fs-extra if `script/bootstrap` has never been run, because
// it's a third party module. This is okay because cleaning dependencies only
// makes sense if dependencies have been installed at least once.
const fs = require('fs-extra')
const apmDependenciesPath = path.join(CONFIG.apmRootPath, 'node_modules')
console.log(`Cleaning ${apmDependenciesPath}`);
fs.removeSync(apmDependenciesPath)
const atomDependenciesPath = path.join(CONFIG.repositoryRootPath, 'node_modules')
console.log(`Cleaning ${atomDependenciesPath}`);
fs.removeSync(atomDependenciesPath)
const scriptDependenciesPath = path.join(CONFIG.scriptRootPath, 'node_modules')
console.log(`Cleaning ${scriptDependenciesPath}`);
fs.removeSync(scriptDependenciesPath)
}

View File

@ -0,0 +1,10 @@
const fs = require('fs-extra')
const path = require('path')
const CONFIG = require('../config')
module.exports = function () {
if (fs.existsSync(CONFIG.buildOutputPath)) {
console.log(`Cleaning ${CONFIG.buildOutputPath}`)
fs.removeSync(CONFIG.buildOutputPath)
}
}

View File

@ -0,0 +1,44 @@
const downloadFileFromGithub = require('./download-file-from-github')
const fs = require('fs-extra')
const os = require('os')
const path = require('path')
const spawnSync = require('./spawn-sync')
module.exports = function (packagedAppPath) {
if (!process.env.ATOM_MAC_CODE_SIGNING_CERT_DOWNLOAD_URL) {
console.log('Skipping code signing because the ATOM_MAC_CODE_SIGNING_CERT_DOWNLOAD_URL environment variable is not defined'.gray)
return
}
// Download the certificate before entering the try block so that the finally
// clause below can still reference certPath during cleanup.
const certPath = path.join(os.tmpdir(), 'mac.p12')
downloadFileFromGithub(process.env.ATOM_MAC_CODE_SIGNING_CERT_DOWNLOAD_URL, certPath)
try {
console.log(`Unlocking keychain ${process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN}`)
const unlockArgs = ['unlock-keychain']
// For signing on local workstations, the password can be entered interactively
if (process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN_PASSWORD) {
unlockArgs.push('-p', process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN_PASSWORD)
}
unlockArgs.push(process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN)
spawnSync('security', unlockArgs, {stdio: 'inherit'})
console.log(`Importing certificate at ${certPath} into ${process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN} keychain`)
spawnSync('security', [
'import', certPath,
'-P', process.env.ATOM_MAC_CODE_SIGNING_CERT_PASSWORD,
'-k', process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN,
'-T', '/usr/bin/codesign'
])
console.log(`Code-signing application at ${packagedAppPath}`)
spawnSync('codesign', [
'--deep', '--force', '--verbose',
'--keychain', process.env.ATOM_MAC_CODE_SIGNING_KEYCHAIN,
'--sign', 'Developer ID Application: GitHub', packagedAppPath
], {stdio: 'inherit'})
} finally {
console.log(`Deleting certificate at ${certPath}`)
fs.removeSync(certPath)
}
}

View File

@ -0,0 +1,55 @@
'use strict'
const fs = require('fs-extra')
const path = require('path')
const spawnSync = require('./spawn-sync')
const CONFIG = require('../config')
module.exports = function (packagedAppPath) {
let appArchiveName
if (process.platform === 'darwin') {
appArchiveName = 'atom-mac.zip'
} else if (process.platform === 'win32') {
appArchiveName = 'atom-windows.zip'
} else {
let arch
if (process.arch === 'ia32') {
arch = 'i386'
} else if (process.arch === 'x64') {
arch = 'amd64'
} else {
arch = process.arch
}
appArchiveName = `atom-${arch}.tar.gz`
}
const appArchivePath = path.join(CONFIG.buildOutputPath, appArchiveName)
compress(packagedAppPath, appArchivePath)
if (process.platform === 'darwin') {
const symbolsArchivePath = path.join(CONFIG.buildOutputPath, 'atom-mac-symbols.zip')
compress(CONFIG.symbolsPath, symbolsArchivePath)
}
}
function compress (inputDirPath, outputArchivePath) {
if (fs.existsSync(outputArchivePath)) {
console.log(`Deleting "${outputArchivePath}"`)
fs.removeSync(outputArchivePath)
}
console.log(`Compressing "${inputDirPath}" to "${outputArchivePath}"`)
let compressCommand, compressArguments
if (process.platform === 'darwin') {
compressCommand = 'zip'
compressArguments = ['-r', '--symlinks']
} else if (process.platform === 'win32') {
compressCommand = '7z.exe'
compressArguments = ['a', '-r']
} else {
compressCommand = 'tar'
compressArguments = ['caf']
}
compressArguments.push(outputArchivePath, path.basename(inputDirPath))
spawnSync(compressCommand, compressArguments, {cwd: path.dirname(inputDirPath)})
}

37
script/lib/copy-assets.js Normal file
View File

@ -0,0 +1,37 @@
// This module exports a function that copies all the static assets into the
// appropriate location in the build output directory.
'use strict'
const path = require('path')
const fs = require('fs-extra')
const CONFIG = require('../config')
const glob = require('glob')
const includePathInPackagedApp = require('./include-path-in-packaged-app')
module.exports = function () {
console.log(`Copying assets to ${CONFIG.intermediateAppPath}`);
let srcPaths = [
path.join(CONFIG.repositoryRootPath, 'dot-atom'),
path.join(CONFIG.repositoryRootPath, 'exports'),
path.join(CONFIG.repositoryRootPath, 'node_modules'),
path.join(CONFIG.repositoryRootPath, 'package.json'),
path.join(CONFIG.repositoryRootPath, 'static'),
path.join(CONFIG.repositoryRootPath, 'src'),
path.join(CONFIG.repositoryRootPath, 'vendor')
]
srcPaths = srcPaths.concat(glob.sync(path.join(CONFIG.repositoryRootPath, 'spec', '*.*'), {ignore: path.join('**', '*-spec.*')}))
for (let srcPath of srcPaths) {
fs.copySync(srcPath, computeDestinationPath(srcPath), {filter: includePathInPackagedApp})
}
fs.copySync(
path.join(CONFIG.repositoryRootPath, 'resources', 'app-icons', CONFIG.channel, 'png', '1024.png'),
path.join(CONFIG.intermediateAppPath, 'resources', 'atom.png')
)
}
function computeDestinationPath (srcPath) {
const relativePath = path.relative(CONFIG.repositoryRootPath, srcPath)
return path.join(CONFIG.intermediateAppPath, relativePath)
}
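
Note: a worked example of the destination mapping above; the repository location is made up, and out/app comes from intermediateAppPath in script/config.js.
// Standalone illustration of computeDestinationPath (paths are illustrative)
const path = require('path')
const repositoryRootPath = '/path/to/atom'
const intermediateAppPath = path.join(repositoryRootPath, 'out', 'app')
const srcPath = path.join(repositoryRootPath, 'src', 'workspace.js')
console.log(path.join(intermediateAppPath, path.relative(repositoryRootPath, srcPath)))
// => /path/to/atom/out/app/src/workspace.js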

View File

@ -0,0 +1,116 @@
'use strict'
const fs = require('fs-extra')
const os = require('os')
const path = require('path')
const spawnSync = require('./spawn-sync')
const template = require('lodash.template')
const CONFIG = require('../config')
module.exports = function (packagedAppPath) {
console.log(`Creating Debian package for "${packagedAppPath}"`)
const atomExecutableName = CONFIG.channel === 'beta' ? 'atom-beta' : 'atom'
const apmExecutableName = CONFIG.channel === 'beta' ? 'apm-beta' : 'apm'
const appName = CONFIG.channel === 'beta' ? 'Atom Beta' : 'Atom'
const appDescription = CONFIG.appMetadata.description
const appVersion = CONFIG.appMetadata.version
let arch
if (process.arch === 'ia32') {
arch = 'i386'
} else if (process.arch === 'x64') {
arch = 'amd64'
} else if (process.arch === 'ppc') {
arch = 'powerpc'
} else {
arch = process.arch
}
const outputDebianPackageFilePath = path.join(CONFIG.buildOutputPath, `atom-${arch}.deb`)
const debianPackageDirPath = path.join(os.tmpdir(), path.basename(packagedAppPath))
const debianPackageConfigPath = path.join(debianPackageDirPath, 'DEBIAN')
const debianPackageInstallDirPath = path.join(debianPackageDirPath, 'usr')
const debianPackageBinDirPath = path.join(debianPackageInstallDirPath, 'bin')
const debianPackageShareDirPath = path.join(debianPackageInstallDirPath, 'share')
const debianPackageAtomDirPath = path.join(debianPackageShareDirPath, atomExecutableName)
const debianPackageApplicationsDirPath = path.join(debianPackageShareDirPath, 'applications')
const debianPackageIconsDirPath = path.join(debianPackageShareDirPath, 'pixmaps')
const debianPackageLintianOverridesDirPath = path.join(debianPackageShareDirPath, 'lintian', 'overrides')
const debianPackageDocsDirPath = path.join(debianPackageShareDirPath, 'doc', atomExecutableName)
if (fs.existsSync(debianPackageDirPath)) {
console.log(`Deleting existing build dir for Debian package at "${debianPackageDirPath}"`)
fs.removeSync(debianPackageDirPath)
}
if (fs.existsSync(`${debianPackageDirPath}.deb`)) {
console.log(`Deleting existing Debian package at "${debianPackageDirPath}.deb"`)
fs.removeSync(`${debianPackageDirPath}.deb`)
}
if (fs.existsSync(outputDebianPackageFilePath)) {
console.log(`Deleting existing Debian package at "${outputDebianPackageFilePath}"`)
fs.removeSync(outputDebianPackageFilePath)
}
console.log(`Creating Debian package directory structure at "${debianPackageDirPath}"`)
fs.mkdirpSync(debianPackageDirPath)
fs.mkdirpSync(debianPackageConfigPath)
fs.mkdirpSync(debianPackageInstallDirPath)
fs.mkdirpSync(debianPackageShareDirPath)
fs.mkdirpSync(debianPackageApplicationsDirPath)
fs.mkdirpSync(debianPackageIconsDirPath)
fs.mkdirpSync(debianPackageLintianOverridesDirPath)
fs.mkdirpSync(debianPackageDocsDirPath)
fs.mkdirpSync(debianPackageBinDirPath)
console.log(`Copying "${packagedAppPath}" to "${debianPackageAtomDirPath}"`)
fs.copySync(packagedAppPath, debianPackageAtomDirPath)
fs.chmodSync(debianPackageAtomDirPath, '755')
console.log(`Copying binaries into "${debianPackageBinDirPath}"`)
fs.copySync(path.join(CONFIG.repositoryRootPath, 'atom.sh'), path.join(debianPackageBinDirPath, atomExecutableName))
fs.symlinkSync(
path.join('..', 'share', atomExecutableName, 'resources', 'app', 'apm', 'node_modules', '.bin', 'apm'),
path.join(debianPackageBinDirPath, apmExecutableName)
)
console.log(`Writing control file into "${debianPackageConfigPath}"`)
const packageSizeInKilobytes = spawnSync('du', ['-sk', packagedAppPath]).stdout.toString().split(/\s+/)[0]
const controlFileTemplate = fs.readFileSync(path.join(CONFIG.repositoryRootPath, 'resources', 'linux', 'debian', 'control.in'))
const controlFileContents = template(controlFileTemplate)({
appFileName: atomExecutableName, version: appVersion, arch: arch,
installedSize: packageSizeInKilobytes, description: appDescription
})
fs.writeFileSync(path.join(debianPackageConfigPath, 'control'), controlFileContents)
console.log(`Writing desktop entry file into "${debianPackageApplicationsDirPath}"`)
const desktopEntryTemplate = fs.readFileSync(path.join(CONFIG.repositoryRootPath, 'resources', 'linux', 'atom.desktop.in'))
const desktopEntryContents = template(desktopEntryTemplate)({
appName: appName, appFileName: atomExecutableName, description: appDescription,
installDir: '/usr', iconPath: atomExecutableName
})
fs.writeFileSync(path.join(debianPackageApplicationsDirPath, `${atomExecutableName}.desktop`), desktopEntryContents)
console.log(`Copying icon into "${debianPackageIconsDirPath}"`)
fs.copySync(
path.join(packagedAppPath, 'resources', 'app.asar.unpacked', 'resources', 'atom.png'),
path.join(debianPackageIconsDirPath, `${atomExecutableName}.png`)
)
console.log(`Copying license into "${debianPackageDocsDirPath}"`)
fs.copySync(
path.join(packagedAppPath, 'resources', 'LICENSE.md'),
path.join(debianPackageDocsDirPath, 'copyright')
)
console.log(`Copying lintian overrides into "${debianPackageLintianOverridesDirPath}"`)
fs.copySync(
path.join(CONFIG.repositoryRootPath, 'resources', 'linux', 'debian', 'lintian-overrides'),
path.join(debianPackageLintianOverridesDirPath, atomExecutableName)
)
console.log(`Generating .deb file from ${debianPackageDirPath}`)
spawnSync('fakeroot', ['dpkg-deb', '-b', debianPackageDirPath], {stdio: 'inherit'})
console.log(`Copying generated package into "${outputDebianPackageFilePath}"`)
fs.copySync(`${debianPackageDirPath}.deb`, outputDebianPackageFilePath)
}

View File

@ -0,0 +1,84 @@
'use strict'
const assert = require('assert')
const fs = require('fs-extra')
const os = require('os')
const path = require('path')
const spawnSync = require('./spawn-sync')
const template = require('lodash.template')
const CONFIG = require('../config')
module.exports = function (packagedAppPath) {
console.log(`Creating rpm package for "${packagedAppPath}"`)
const atomExecutableName = CONFIG.channel === 'beta' ? 'atom-beta' : 'atom'
const apmExecutableName = CONFIG.channel === 'beta' ? 'apm-beta' : 'apm'
const appName = CONFIG.channel === 'beta' ? 'Atom Beta' : 'Atom'
const appDescription = CONFIG.appMetadata.description
// RPM versions can't have dashes or tildes in them.
// (Ref.: https://twiki.cern.ch/twiki/bin/view/Main/RPMAndDebVersioning)
const appVersion = CONFIG.appMetadata.version.replace(/-/g, '.')
const rpmPackageDirPath = path.join(CONFIG.homeDirPath, 'rpmbuild')
const rpmPackageBuildDirPath = path.join(rpmPackageDirPath, 'BUILD')
const rpmPackageSourcesDirPath = path.join(rpmPackageDirPath, 'SOURCES')
const rpmPackageSpecsDirPath = path.join(rpmPackageDirPath, 'SPECS')
const rpmPackageRpmsDirPath = path.join(rpmPackageDirPath, 'RPMS')
const rpmPackageApplicationDirPath = path.join(rpmPackageBuildDirPath, appName)
const rpmPackageIconsDirPath = path.join(rpmPackageBuildDirPath, 'icons')
if (fs.existsSync(rpmPackageDirPath)) {
console.log(`Deleting existing rpm build directory at "${rpmPackageDirPath}"`)
fs.removeSync(rpmPackageDirPath)
}
console.log(`Creating rpm package directory structure at "${rpmPackageDirPath}"`)
fs.mkdirpSync(rpmPackageDirPath)
fs.mkdirpSync(rpmPackageBuildDirPath)
fs.mkdirpSync(rpmPackageSourcesDirPath)
fs.mkdirpSync(rpmPackageSpecsDirPath)
console.log(`Copying "${packagedAppPath}" to "${rpmPackageApplicationDirPath}"`)
fs.copySync(packagedAppPath, rpmPackageApplicationDirPath)
console.log(`Copying icons into "${rpmPackageIconsDirPath}"`)
fs.copySync(
path.join(CONFIG.repositoryRootPath, 'resources', 'app-icons', CONFIG.channel, 'png'),
rpmPackageIconsDirPath
)
console.log(`Writing rpm package spec file into "${rpmPackageSpecsDirPath}"`)
const rpmPackageSpecFilePath = path.join(rpmPackageSpecsDirPath, 'atom.spec')
const rpmPackageSpecsTemplate = fs.readFileSync(path.join(CONFIG.repositoryRootPath, 'resources', 'linux', 'redhat', 'atom.spec.in'))
const rpmPackageSpecsContents = template(rpmPackageSpecsTemplate)({
appName: appName, appFileName: atomExecutableName, apmFileName: apmExecutableName,
description: appDescription, installDir: '/usr', version: appVersion
})
fs.writeFileSync(rpmPackageSpecFilePath, rpmPackageSpecsContents)
console.log(`Writing desktop entry file into "${rpmPackageBuildDirPath}"`)
const desktopEntryTemplate = fs.readFileSync(path.join(CONFIG.repositoryRootPath, 'resources', 'linux', 'atom.desktop.in'))
const desktopEntryContents = template(desktopEntryTemplate)({
appName: appName, appFileName: atomExecutableName, description: appDescription,
installDir: '/usr', iconPath: atomExecutableName
})
fs.writeFileSync(path.join(rpmPackageBuildDirPath, `${atomExecutableName}.desktop`), desktopEntryContents)
console.log(`Copying atom.sh into "${rpmPackageBuildDirPath}"`)
fs.copySync(
path.join(CONFIG.repositoryRootPath, 'atom.sh'),
path.join(rpmPackageBuildDirPath, 'atom.sh')
)
console.log(`Generating .rpm package from "${rpmPackageDirPath}"`)
spawnSync('rpmbuild', ['-ba', '--clean', rpmPackageSpecFilePath])
for (let generatedArch of fs.readdirSync(rpmPackageRpmsDirPath)) {
const generatedArchDirPath = path.join(rpmPackageRpmsDirPath, generatedArch)
const generatedPackageFileNames = fs.readdirSync(generatedArchDirPath)
assert(generatedPackageFileNames.length === 1, 'Expected exactly one generated rpm package')
const generatedPackageFilePath = path.join(generatedArchDirPath, generatedPackageFileNames[0])
const outputRpmPackageFilePath = path.join(CONFIG.buildOutputPath, `atom.${generatedArch}.rpm`)
console.log(`Copying "${generatedPackageFilePath}" into "${outputRpmPackageFilePath}"`)
fs.copySync(generatedPackageFilePath, outputRpmPackageFilePath)
}
}

View File

@ -0,0 +1,50 @@
'use strict'
const downloadFileFromGithub = require('./download-file-from-github')
const electronInstaller = require('electron-winstaller')
const fs = require('fs-extra')
const glob = require('glob')
const os = require('os')
const path = require('path')
const CONFIG = require('../config')
module.exports = function (packagedAppPath, codeSign) {
const options = {
appDirectory: packagedAppPath,
authors: 'GitHub Inc.',
iconUrl: `https://raw.githubusercontent.com/atom/atom/master/resources/app-icons/${CONFIG.channel}/atom.ico`,
loadingGif: path.join(CONFIG.repositoryRootPath, 'resources', 'win', 'loading.gif'),
outputDirectory: CONFIG.buildOutputPath,
remoteReleases: `https://atom.io/api/updates?version=${CONFIG.appMetadata.version}`,
setupIcon: path.join(CONFIG.repositoryRootPath, 'resources', 'app-icons', CONFIG.channel, 'atom.ico')
}
const certPath = path.join(os.tmpdir(), 'win.p12')
if (codeSign && process.env.WIN_P12KEY_URL) {
downloadFileFromGithub(process.env.WIN_P12KEY_URL, certPath)
options.certificateFile = certPath
options.certificatePassword = process.env.WIN_P12KEY_PASSWORD
} else {
console.log('Skipping code-signing. Specify the --code-sign option and provide a WIN_P12KEY_URL environment variable to perform code-signing'.gray)
}
const cleanUp = function () {
if (fs.existsSync(certPath)) {
console.log(`Deleting certificate at ${certPath}`)
fs.removeSync(certPath)
}
for (let nupkgPath of glob.sync(`${CONFIG.buildOutputPath}/*.nupkg`)) {
if (!nupkgPath.includes(CONFIG.appMetadata.version)) {
console.log(`Deleting downloaded nupkg for previous version at ${nupkgPath} to prevent it from being stored as an artifact`)
fs.removeSync(nupkgPath)
}
}
}
console.log(`Creating Windows Installer for ${packagedAppPath}`)
return electronInstaller.createWindowsInstaller(options).then(cleanUp, function (error) {
console.log(`Windows installer creation failed:\n${error}`)
cleanUp()
})
}

View File

@ -0,0 +1,19 @@
'use strict'
const fs = require('fs')
const path = require('path')
module.exports = function () {
process.env['PATH'] =
process.env['PATH']
.split(';')
.filter(function(p) {
if (fs.existsSync(path.join(p, 'msbuild.exe'))) {
console.log('Excluding "' + p + '" from PATH to avoid msbuild.exe mismatch that causes errors during module installation')
return false;
} else {
return true;
}
})
.join(';');
}

View File

@ -0,0 +1,28 @@
const crypto = require('crypto')
const fs = require('fs')
const path = require('path')
const CONFIG = require('../config')
const FINGERPRINT_PATH = path.join(CONFIG.repositoryRootPath, 'node_modules', '.dependencies-fingerprint')
module.exports = {
write: function () {
const fingerprint = this.compute()
fs.writeFileSync(FINGERPRINT_PATH, fingerprint)
console.log('Wrote Dependencies Fingerprint:', FINGERPRINT_PATH, fingerprint)
},
read: function () {
return fs.existsSync(FINGERPRINT_PATH) ? fs.readFileSync(FINGERPRINT_PATH, 'utf8') : null
},
isOutdated: function () {
const fingerprint = this.read()
return fingerprint ? fingerprint !== this.compute() : false
},
compute: function () {
// Include the Electron version (major.minor) in the fingerprint, since changing it requires a re-install
const electronVersion = CONFIG.appMetadata.electronVersion.replace(/\.\d+$/, '')
const apmVersion = CONFIG.apmMetadata.dependencies['atom-package-manager']
const body = electronVersion + apmVersion + process.platform + process.version
return crypto.createHash('sha1').update(body).digest('hex')
}
}
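
Note: a sketch of the consumer side, mirroring how script/bootstrap earlier in this diff uses the module; nothing here is new API, just the three exported functions shown above.
// From a script living next to script/bootstrap
'use strict'
const dependenciesFingerprint = require('./lib/dependencies-fingerprint')
const cleanDependencies = require('./lib/clean-dependencies')

if (dependenciesFingerprint.isOutdated()) {
  // The Electron version, apm version, platform or Node version changed since
  // the last install, so the existing node_modules can no longer be trusted.
  cleanDependencies()
}
// ...install apm and the Atom dependencies...
dependenciesFingerprint.write()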

View File

@ -0,0 +1,56 @@
'use strict'
const assert = require('assert')
const downloadFileFromGithub = require('./download-file-from-github')
const fs = require('fs-extra')
const path = require('path')
const semver = require('semver')
const spawnSync = require('./spawn-sync')
const syncRequest = require('sync-request')
const CONFIG = require('../config')
module.exports = function () {
if (process.platform === 'darwin') {
// Chromedriver is only distributed with the first patch release for any given
// major and minor version of electron.
const electronVersion = semver.parse(CONFIG.appMetadata.electronVersion)
const electronVersionWithChromedriver = `${electronVersion.major}.${electronVersion.minor}.0`
const electronAssets = getElectronAssetsForVersion(electronVersionWithChromedriver)
const chromedriverAssets = electronAssets.filter(e => /chromedriver.*darwin-x64/.test(e.name))
assert(chromedriverAssets.length === 1, 'Expected exactly one chromedriver asset to download for the current Electron version')
const chromedriverAsset = chromedriverAssets[0]
const chromedriverZipPath = path.join(CONFIG.electronDownloadPath, `electron-${electronVersionWithChromedriver}-${chromedriverAsset.name}`)
if (!fs.existsSync(chromedriverZipPath)) {
downloadFileFromGithub(chromedriverAsset.url, chromedriverZipPath)
}
const chromedriverDirPath = path.join(CONFIG.electronDownloadPath, 'chromedriver')
unzipPath(chromedriverZipPath, chromedriverDirPath)
} else {
console.log('Skipping Chromedriver download because it is used only on macOS'.gray)
}
}
function getElectronAssetsForVersion (version) {
const releaseURL = `https://api.github.com/repos/electron/electron/releases/tags/v${version}`
const response = syncRequest('GET', releaseURL, {'headers': {'User-Agent': 'Atom Build'}})
if (response.statusCode === 200) {
const release = JSON.parse(response.body)
return release.assets.map(a => { return {name: a.name, url: a.browser_download_url} })
} else {
throw new Error(`Error getting assets for ${releaseURL}. HTTP Status ${response.statusCode}.`)
}
}
function unzipPath (inputPath, outputPath) {
if (fs.existsSync(outputPath)) {
console.log(`Removing "${outputPath}"`)
fs.removeSync(outputPath)
}
console.log(`Unzipping "${inputPath}" to "${outputPath}"`)
spawnSync('unzip', [inputPath, '-d', outputPath])
}
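
Note: a small illustration of the "first patch release" rule described in the comment above; the Electron version is made up.
const semver = require('semver')
const electronVersion = semver.parse('1.3.6') // hypothetical electronVersion from package.json
const electronVersionWithChromedriver = `${electronVersion.major}.${electronVersion.minor}.0`
console.log(electronVersionWithChromedriver) // => '1.3.0', the release whose chromedriver asset is downloaded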

View File

@ -0,0 +1,19 @@
'use strict'
const fs = require('fs-extra')
const path = require('path')
const syncRequest = require('sync-request')
module.exports = function (downloadURL, destinationPath) {
console.log(`Downloading file from GitHub repository to ${destinationPath}`)
const response = syncRequest('GET', downloadURL, {
'headers': {'Accept': 'application/vnd.github.v3.raw', 'User-Agent': 'Atom Build'}
})
if (response.statusCode === 200) {
fs.mkdirpSync(path.dirname(destinationPath))
fs.writeFileSync(destinationPath, response.body)
} else {
throw new Error('Error downloading file. HTTP Status ' + response.statusCode + '.')
}
}
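
Note: a hedged usage sketch; the URL and destination file below are placeholders, but the call shape matches how code-sign-on-mac.js and download-chromedriver.js in this commit use the helper.
'use strict'
const os = require('os')
const path = require('path')
const downloadFileFromGithub = require('./download-file-from-github')

const destinationPath = path.join(os.tmpdir(), 'example-asset.bin')
// Placeholder URL; in the build scripts this comes from an environment
// variable or from the GitHub releases API.
downloadFileFromGithub('https://example.com/some/asset', destinationPath)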

View File

@ -0,0 +1,44 @@
'use strict'
const fs = require('fs-extra')
const glob = require('glob')
const path = require('path')
const CONFIG = require('../config')
module.exports = function () {
if (process.platform === 'win32') {
console.log('Skipping symbol dumping because minidump is not supported on Windows'.gray)
return Promise.resolve()
} else {
console.log(`Dumping symbols in ${CONFIG.symbolsPath}`)
const binaryPaths = glob.sync(path.join(CONFIG.intermediateAppPath, 'node_modules', '**', '*.node'))
return Promise.all(binaryPaths.map(dumpSymbol))
}
}
function dumpSymbol (binaryPath) {
const minidump = require('minidump')
return new Promise(function (resolve, reject) {
minidump.dumpSymbol(binaryPath, function (error, content) {
if (error) {
console.error(error)
// Reject instead of throwing inside the callback so the failure propagates
// through the promise chain in script/build.
reject(error)
} else {
const moduleLine = /MODULE [^ ]+ [^ ]+ ([0-9A-F]+) (.*)\n/.exec(content)
if (!moduleLine || moduleLine.length !== 3) {
const errorMessage = `Invalid output when dumping symbol for ${binaryPath}`
console.error(errorMessage)
reject(new Error(errorMessage))
} else {
const filename = moduleLine[2]
const symbolDirPath = path.join(CONFIG.symbolsPath, filename, moduleLine[1])
const symbolFilePath = path.join(symbolDirPath, `${filename}.sym`)
fs.mkdirpSync(symbolDirPath)
fs.writeFileSync(symbolFilePath, content)
resolve()
}
}
})
})
}
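
Note: a worked example of the MODULE record parsing above; the module id and file name are made up, and 'symbols' stands in for CONFIG.symbolsPath.
const path = require('path')
const content = 'MODULE mac x86_64 4C4C44E555553144A1B677B3D7F455E80 oniguruma.node\n...'
const moduleLine = /MODULE [^ ]+ [^ ]+ ([0-9A-F]+) (.*)\n/.exec(content)
const filename = moduleLine[2] // 'oniguruma.node'
const symbolDirPath = path.join('symbols', filename, moduleLine[1])
console.log(path.join(symbolDirPath, `${filename}.sym`))
// => symbols/oniguruma.node/4C4C44E555553144A1B677B3D7F455E80/oniguruma.node.sym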

View File

@ -0,0 +1,19 @@
'use strict'
const glob = require('glob')
module.exports = function (globPaths) {
return Promise.all(globPaths.map(g => expandGlobPath(g))).then(paths => paths.reduce((a, b) => a.concat(b), []))
}
function expandGlobPath (globPath) {
return new Promise((resolve, reject) => {
glob(globPath, (error, paths) => {
if (error) {
reject(error)
} else {
resolve(paths)
}
})
})
}
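
Note: a hypothetical usage sketch; the glob patterns are examples only.
'use strict'
const path = require('path')
const expandGlobPaths = require('./expand-glob-paths')

expandGlobPaths([
  path.join('src', '**', '*.js'),
  path.join('spec', '*.coffee')
]).then(paths => {
  // `paths` is a single flattened array covering all of the input patterns
  console.log(`Matched ${paths.length} files`)
})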

View File

@ -0,0 +1,53 @@
'use strict'
const donna = require('donna')
const tello = require('tello')
const joanna = require('joanna')
const glob = require('glob')
const fs = require('fs-extra')
const path = require('path')
const CONFIG = require('../config')
module.exports = function () {
const generatedJSONPath = path.join(CONFIG.docsOutputPath, 'atom-api.json')
console.log(`Generating API docs at ${generatedJSONPath}`)
// Unfortunately, correct relative paths depend on a specific working
// directory, but this script should be able to run from anywhere, so we
// muck with the cwd temporarily.
const oldWorkingDirectoryPath = process.cwd()
process.chdir(CONFIG.repositoryRootPath)
const coffeeMetadata = donna.generateMetadata(['.'])[0]
const jsMetadata = joanna(glob.sync(`src/**/*.js`))
process.chdir(oldWorkingDirectoryPath)
const metadata = {
repository: coffeeMetadata.repository,
version: coffeeMetadata.version,
files: Object.assign(coffeeMetadata.files, jsMetadata.files)
}
const api = tello.digest([metadata])
Object.assign(api.classes, getAPIDocsForDependencies())
api.classes = sortObjectByKey(api.classes)
fs.mkdirpSync(CONFIG.docsOutputPath)
fs.writeFileSync(generatedJSONPath, JSON.stringify(api, null, 2))
}
function getAPIDocsForDependencies () {
const classes = {}
for (let apiJSONPath of glob.sync(`${CONFIG.repositoryRootPath}/node_modules/*/api.json`)) {
Object.assign(classes, require(apiJSONPath).classes)
}
return classes
}
function sortObjectByKey (object) {
const sortedObject = {}
for (let keyName of Object.keys(object).sort()) {
sortedObject[keyName] = object[keyName]
}
return sortedObject
}

View File

@ -0,0 +1,134 @@
'use strict'
const CSON = require('season')
const deprecatedPackagesMetadata = require('../deprecated-packages')
const fs = require('fs-extra')
const normalizePackageData = require('normalize-package-data')
const path = require('path')
const semver = require('semver')
const spawnSync = require('./spawn-sync')
const CONFIG = require('../config')
module.exports = function () {
console.log(`Generating metadata for ${path.join(CONFIG.intermediateAppPath, 'package.json')}`)
CONFIG.appMetadata._atomPackages = buildBundledPackagesMetadata()
CONFIG.appMetadata._atomMenu = buildPlatformMenuMetadata()
CONFIG.appMetadata._atomKeymaps = buildPlatformKeymapsMetadata()
CONFIG.appMetadata._deprecatedPackages = deprecatedPackagesMetadata
CONFIG.appMetadata.version = computeAppVersion()
checkDeprecatedPackagesMetadata()
fs.writeFileSync(path.join(CONFIG.intermediateAppPath, 'package.json'), JSON.stringify(CONFIG.appMetadata))
}
function buildBundledPackagesMetadata () {
const packages = {}
for (let packageName of Object.keys(CONFIG.appMetadata.packageDependencies)) {
const packagePath = path.join(CONFIG.intermediateAppPath, 'node_modules', packageName)
const packageMetadataPath = path.join(packagePath, 'package.json')
const packageMetadata = JSON.parse(fs.readFileSync(packageMetadataPath, 'utf8'))
normalizePackageData(packageMetadata, (msg) => {
throw new Error(`Invalid package metadata. ${packageMetadata.name}: ${msg}`)
}, true)
if (packageMetadata.repository && packageMetadata.repository.url && packageMetadata.repository.type === 'git') {
packageMetadata.repository.url = packageMetadata.repository.url.replace(/^git\+/, '')
}
delete packageMetadata['_from']
delete packageMetadata['_id']
delete packageMetadata['dist']
delete packageMetadata['readme']
delete packageMetadata['readmeFilename']
const packageModuleCache = packageMetadata._atomModuleCache || {}
if (packageModuleCache.extensions && packageModuleCache.extensions['.json']) {
const index = packageModuleCache.extensions['.json'].indexOf('package.json')
if (index !== -1) {
packageModuleCache.extensions['.json'].splice(index, 1)
}
}
const packageNewMetadata = {metadata: packageMetadata, keymaps: {}, menus: {}}
if (packageMetadata.main) {
const mainPath = require.resolve(path.resolve(packagePath, packageMetadata.main))
packageNewMetadata.main = path.relative(CONFIG.intermediateAppPath, mainPath)
}
const packageKeymapsPath = path.join(packagePath, 'keymaps')
if (fs.existsSync(packageKeymapsPath)) {
for (let packageKeymapName of fs.readdirSync(packageKeymapsPath)) {
const packageKeymapPath = path.join(packageKeymapsPath, packageKeymapName)
if (packageKeymapPath.endsWith('.cson') || packageKeymapPath.endsWith('.json')) {
const relativePath = path.relative(CONFIG.intermediateAppPath, packageKeymapPath)
packageNewMetadata.keymaps[relativePath] = CSON.readFileSync(packageKeymapPath)
}
}
}
const packageMenusPath = path.join(packagePath, 'menus')
if (fs.existsSync(packageMenusPath)) {
for (let packageMenuName of fs.readdirSync(packageMenusPath)) {
const packageMenuPath = path.join(packageMenusPath, packageMenuName)
if (packageMenuPath.endsWith('.cson') || packageMenuPath.endsWith('.json')) {
const relativePath = path.relative(CONFIG.intermediateAppPath, packageMenuPath)
packageNewMetadata.menus[relativePath] = CSON.readFileSync(packageMenuPath)
}
}
}
packages[packageMetadata.name] = packageNewMetadata
if (packageModuleCache.extensions) {
for (let extension of Object.keys(packageModuleCache.extensions)) {
const paths = packageModuleCache.extensions[extension]
if (paths.length === 0) {
delete packageModuleCache.extensions[extension]
}
}
}
}
return packages
}
function buildPlatformMenuMetadata () {
const menuPath = path.join(CONFIG.repositoryRootPath, 'menus', `${process.platform}.cson`)
if (fs.existsSync(menuPath)) {
return CSON.readFileSync(menuPath)
} else {
return null
}
}
function buildPlatformKeymapsMetadata () {
const invalidPlatforms = ['darwin', 'freebsd', 'linux', 'sunos', 'win32'].filter(p => p !== process.platform)
const keymapsPath = path.join(CONFIG.repositoryRootPath, 'keymaps')
const keymaps = {}
for (let keymapName of fs.readdirSync(keymapsPath)) {
const keymapPath = path.join(keymapsPath, keymapName)
if (keymapPath.endsWith('.cson') || keymapPath.endsWith('.json')) {
const keymapPlatform = path.basename(keymapPath, path.extname(keymapPath))
if (invalidPlatforms.indexOf(keymapPlatform) === -1) {
keymaps[path.basename(keymapPath)] = CSON.readFileSync(keymapPath)
}
}
}
return keymaps
}
function checkDeprecatedPackagesMetadata () {
for (let packageName of Object.keys(deprecatedPackagesMetadata)) {
const packageMetadata = deprecatedPackagesMetadata[packageName]
if (packageMetadata.version && !semver.validRange(packageMetadata.version)) {
throw new Error(`Invalid range: ${packageMetadata.version} (${packageName}).`)
}
}
}
function computeAppVersion () {
let version = CONFIG.appMetadata.version
if (CONFIG.channel === 'dev') {
const result = spawnSync('git', ['rev-parse', '--short', 'HEAD'], {cwd: CONFIG.repositoryRootPath})
const commitHash = result.stdout.toString().trim()
version += '-' + commitHash
}
return version
}
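
Note: an illustration of computeAppVersion on the 'dev' channel; the version and commit hash are made up.
let version = '1.12.0-dev' // hypothetical version from package.json
const commitHash = '5f7f344' // hypothetical output of `git rev-parse --short HEAD`
if (version.match(/dev/)) { // i.e. CONFIG.channel === 'dev'
  version += '-' + commitHash
}
console.log(version) // => 1.12.0-dev-5f7f344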

View File

@ -0,0 +1,31 @@
'use strict'
const fs = require('fs')
const path = require('path')
const ModuleCache = require('../../src/module-cache')
const CONFIG = require('../config')
module.exports = function () {
console.log(`Generating module cache for ${CONFIG.intermediateAppPath}`)
for (let packageName of Object.keys(CONFIG.appMetadata.packageDependencies)) {
ModuleCache.create(path.join(CONFIG.intermediateAppPath, 'node_modules', packageName))
}
ModuleCache.create(CONFIG.intermediateAppPath)
const newMetadata = JSON.parse(fs.readFileSync(path.join(CONFIG.intermediateAppPath, 'package.json')))
for (let folder of newMetadata._atomModuleCache.folders) {
if (folder.paths.indexOf('') !== -1) {
folder.paths = [
'',
'exports',
'spec',
'src',
'src/main-process',
'static',
'vendor'
]
}
}
CONFIG.appMetadata = newMetadata
fs.writeFileSync(path.join(CONFIG.intermediateAppPath, 'package.json'), JSON.stringify(CONFIG.appMetadata))
}

View File

@ -0,0 +1,38 @@
'use strict'
const fs = require('fs')
const path = require('path')
const legalEagle = require('legal-eagle')
const licenseOverrides = require('../license-overrides')
const CONFIG = require('../config')
module.exports = function () {
return new Promise((resolve, reject) => {
legalEagle({path: CONFIG.repositoryRootPath, overrides: licenseOverrides}, (err, packagesLicenses) => {
if (err) {
reject(err)
} else {
let text =
fs.readFileSync(path.join(CONFIG.repositoryRootPath, 'LICENSE.md'), 'utf8') + '\n\n' +
'This application bundles the following third-party packages in accordance\n' +
'with the following licenses:\n\n'
for (let packageName of Object.keys(packagesLicenses).sort()) {
const packageLicense = packagesLicenses[packageName]
text += "-------------------------------------------------------------------------\n\n"
text += `Package: ${packageName}\n`
text += `License: ${packageLicense.license}\n`
if (packageLicense.source) {
text += `License Source: ${packageLicense.source}\n`
}
if (packageLicense.sourceText) {
text += `Source Text:\n\n${packageLicense.sourceText}`
}
text += '\n'
}
resolve(text)
}
})
})
}

View File

@ -0,0 +1,99 @@
'use strict'
const path = require('path')
const CONFIG = require('../config')
module.exports = function (path) {
return !EXCLUDED_PATHS_REGEXP.test(path)
}
const EXCLUDE_REGEXPS_SOURCES = [
escapeRegExp('.DS_Store'),
escapeRegExp('.jshintrc'),
escapeRegExp('.npmignore'),
escapeRegExp('.pairs'),
escapeRegExp('.travis.yml'),
escapeRegExp('appveyor.yml'),
escapeRegExp('circle.yml'),
escapeRegExp('.idea'),
escapeRegExp('.editorconfig'),
escapeRegExp('.lint'),
escapeRegExp('.lintignore'),
escapeRegExp('.eslintrc'),
escapeRegExp('.jshintignore'),
escapeRegExp('coffeelint.json'),
escapeRegExp('.coffeelintignore'),
escapeRegExp('.gitattributes'),
escapeRegExp('.gitkeep'),
escapeRegExp(path.join('git-utils', 'deps')),
escapeRegExp(path.join('oniguruma', 'deps')),
escapeRegExp(path.join('less', 'dist')),
escapeRegExp(path.join('npm', 'doc')),
escapeRegExp(path.join('npm', 'html')),
escapeRegExp(path.join('npm', 'man')),
escapeRegExp(path.join('npm', 'node_modules', '.bin', 'beep')),
escapeRegExp(path.join('npm', 'node_modules', '.bin', 'clear')),
escapeRegExp(path.join('npm', 'node_modules', '.bin', 'starwars')),
escapeRegExp(path.join('pegjs', 'examples')),
escapeRegExp(path.join('get-parameter-names', 'node_modules', 'testla')),
escapeRegExp(path.join('get-parameter-names', 'node_modules', '.bin', 'testla')),
escapeRegExp(path.join('jasmine-reporters', 'ext')),
escapeRegExp(path.join('node_modules', 'nan')),
escapeRegExp(path.join('node_modules', 'native-mate')),
escapeRegExp(path.join('build', 'binding.Makefile')),
escapeRegExp(path.join('build', 'config.gypi')),
escapeRegExp(path.join('build', 'gyp-mac-tool')),
escapeRegExp(path.join('build', 'Makefile')),
escapeRegExp(path.join('build', 'Release', 'obj.target')),
escapeRegExp(path.join('build', 'Release', 'obj')),
escapeRegExp(path.join('build', 'Release', '.deps')),
escapeRegExp(path.join('vendor', 'apm')),
// These are only required in dev-mode, when pegjs grammars aren't precompiled
escapeRegExp(path.join('node_modules', 'loophole')),
escapeRegExp(path.join('node_modules', 'pegjs')),
escapeRegExp(path.join('node_modules', '.bin', 'pegjs')),
escapeRegExp(path.join('node_modules', 'spellchecker', 'vendor', 'hunspell') + path.sep) + '.*',
escapeRegExp(path.join('build', 'Release') + path.sep) + '.*\\.pdb',
// Ignore *.cc and *.h files from native modules
escapeRegExp(path.join('ctags', 'src') + path.sep) + '.*\\.(cc|h)*',
escapeRegExp(path.join('git-utils', 'src') + path.sep) + '.*\\.(cc|h)*',
escapeRegExp(path.join('keytar', 'src') + path.sep) + '.*\\.(cc|h)*',
escapeRegExp(path.join('nslog', 'src') + path.sep) + '.*\\.(cc|h)*',
escapeRegExp(path.join('oniguruma', 'src') + path.sep) + '.*\\.(cc|h)*',
escapeRegExp(path.join('pathwatcher', 'src') + path.sep) + '.*\\.(cc|h)*',
escapeRegExp(path.join('runas', 'src') + path.sep) + '.*\\.(cc|h)*',
escapeRegExp(path.join('scrollbar-style', 'src') + path.sep) + '.*\\.(cc|h)*',
escapeRegExp(path.join('spellchecker', 'src') + path.sep) + '.*\\.(cc|h)*',
escapeRegExp(path.join('cached-run-in-this-context', 'src') + path.sep) + '.*\\.(cc|h)?',
escapeRegExp(path.join('keyboard-layout', 'src') + path.sep) + '.*\\.(cc|h|mm)*',
// Ignore build files
escapeRegExp(path.sep) + 'binding\\.gyp$',
escapeRegExp(path.sep) + '.+\\.target.mk$',
escapeRegExp(path.sep) + 'linker\\.lock$',
escapeRegExp(path.join('build', 'Release') + path.sep) + '.+\\.node\\.dSYM',
// Ignore test and example folders
'node_modules' + escapeRegExp(path.sep) + '.*' + escapeRegExp(path.sep) + '_*te?sts?_*' + escapeRegExp(path.sep),
'node_modules' + escapeRegExp(path.sep) + '.*' + escapeRegExp(path.sep) + 'examples?' + escapeRegExp(path.sep),
]
// Ignore spec directories in all bundled packages
for (let packageName in CONFIG.appMetadata.packageDependencies) {
EXCLUDE_REGEXPS_SOURCES.push('^' + escapeRegExp(path.join(CONFIG.repositoryRootPath, 'node_modules', packageName, 'spec')))
}
// Ignore Hunspell dictionaries only on macOS.
if (process.platform === 'darwin') {
EXCLUDE_REGEXPS_SOURCES.push(escapeRegExp(path.join('spellchecker', 'vendor', 'hunspell_dictionaries')))
}
const EXCLUDED_PATHS_REGEXP = new RegExp(
EXCLUDE_REGEXPS_SOURCES.map(path => `(${path})`).join('|')
)
function escapeRegExp (string) {
return string.replace(/[.?*+^$[\]\\(){}|-]/g, "\\$&")
}
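
Note: a sketch of how this predicate is consumed; it mirrors the fs.copySync call in script/lib/copy-assets.js earlier in this diff, with illustrative source and destination paths.
'use strict'
const fs = require('fs-extra')
const includePathInPackagedApp = require('./include-path-in-packaged-app')

// fs-extra invokes the filter for every path it is about to copy; anything
// matching EXCLUDED_PATHS_REGEXP is left out of the packaged app.
fs.copySync('node_modules', 'out/app/node_modules', {filter: includePathInPackagedApp})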

15
script/lib/install-apm.js Normal file
View File

@ -0,0 +1,15 @@
'use strict'
const childProcess = require('child_process')
const path = require('path')
const CONFIG = require('../config')
module.exports = function () {
console.log('Installing apm')
childProcess.execFileSync(
CONFIG.getNpmBinPath(),
['--global-style', '--loglevel=error', 'install'],
{env: process.env, cwd: CONFIG.apmRootPath}
)
}

View File

@ -0,0 +1,92 @@
'use strict'
const fs = require('fs-extra')
const path = require('path')
const runas = require('runas')
const template = require('lodash.template')
const CONFIG = require('../config')
module.exports = function (packagedAppPath) {
const packagedAppFileName = path.basename(packagedAppPath)
if (process.platform === 'darwin') {
const installationDirPath = path.join(path.sep, 'Applications', packagedAppFileName)
if (fs.existsSync(installationDirPath)) {
console.log(`Removing previously installed "${packagedAppFileName}" at "${installationDirPath}"`)
fs.removeSync(installationDirPath)
}
console.log(`Installing "${packagedAppPath}" at "${installationDirPath}"`)
fs.copySync(packagedAppPath, installationDirPath)
} else if (process.platform === 'win32') {
const installationDirPath = path.join(process.env.LOCALAPPDATA, packagedAppFileName, 'app-dev')
try {
if (fs.existsSync(installationDirPath)) {
console.log(`Removing previously installed "${packagedAppFileName}" at "${installationDirPath}"`)
fs.removeSync(installationDirPath)
}
console.log(`Installing "${packagedAppPath}" at "${installationDirPath}"`)
fs.copySync(packagedAppPath, installationDirPath)
} catch (e) {
console.log(`Administrator elevation required to install into "${installationDirPath}"`)
const copyScriptPath = path.join(CONFIG.repositoryRootPath, 'script', 'copy-folder.cmd')
const exitCode = runas('cmd', ['/c', copyScriptPath, packagedAppPath, installationDirPath], {admin: true})
if (exitCode !== 0) {
throw new Error(`Installation failed. "${copyScriptPath}" exited with status: ${exitCode}`)
}
}
} else {
const atomExecutableName = CONFIG.channel === 'beta' ? 'atom-beta' : 'atom'
const apmExecutableName = CONFIG.channel === 'beta' ? 'apm-beta' : 'apm'
const appName = CONFIG.channel === 'beta' ? 'Atom Beta' : 'Atom'
const appDescription = CONFIG.appMetadata.description
const userLocalDirPath = path.join('/usr', 'local')
const shareDirPath = path.join(userLocalDirPath, 'share')
const installationDirPath = path.join(shareDirPath, atomExecutableName)
const applicationsDirPath = path.join(shareDirPath, 'applications')
const desktopEntryPath = path.join(applicationsDirPath, `${atomExecutableName}.desktop`)
const binDirPath = path.join(userLocalDirPath, 'bin')
const atomBinDestinationPath = path.join(binDirPath, atomExecutableName)
const apmBinDestinationPath = path.join(binDirPath, apmExecutableName)
fs.mkdirpSync(applicationsDirPath)
fs.mkdirpSync(binDirPath)
if (fs.existsSync(installationDirPath)) {
console.log(`Removing previously installed "${packagedAppFileName}" at "${installationDirPath}"`)
fs.removeSync(installationDirPath)
}
console.log(`Installing "${packagedAppFileName}" at "${installationDirPath}"`)
fs.copySync(packagedAppPath, installationDirPath)
if (fs.existsSync(desktopEntryPath)) {
console.log(`Removing existing desktop entry file at "${desktopEntryPath}"`)
fs.removeSync(desktopEntryPath)
}
console.log(`Writing desktop entry file at "${desktopEntryPath}"`)
const iconPath = path.join(CONFIG.repositoryRootPath, 'resources', 'app-icons', CONFIG.channel, 'png', '1024.png')
const desktopEntryTemplate = fs.readFileSync(path.join(CONFIG.repositoryRootPath, 'resources', 'linux', 'atom.desktop.in'))
const desktopEntryContents = template(desktopEntryTemplate)({
appName, appFileName: atomExecutableName, description: appDescription,
installDir: '/usr', iconPath
})
fs.writeFileSync(desktopEntryPath, desktopEntryContents)
if (fs.existsSync(atomBinDestinationPath)) {
console.log(`Removing existing executable at "${atomBinDestinationPath}"`)
fs.removeSync(atomBinDestinationPath)
}
console.log(`Copying atom.sh to "${atomBinDestinationPath}"`)
fs.copySync(path.join(CONFIG.repositoryRootPath, 'atom.sh'), atomBinDestinationPath)
try {
fs.lstatSync(apmBinDestinationPath)
console.log(`Removing existing executable at "${apmBinDestinationPath}"`)
fs.removeSync(apmBinDestinationPath)
} catch (e) { }
console.log(`Symlinking apm to "${apmBinDestinationPath}"`)
fs.symlinkSync(path.join('..', 'share', atomExecutableName, 'resources', 'app', 'apm', 'node_modules', '.bin', 'apm'), apmBinDestinationPath)
console.log(`Changing permissions to 755 for "${installationDirPath}"`)
fs.chmodSync(installationDirPath, '755')
}
}
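A sketch of how this installer might be driven (editorial; both require paths are assumptions, since the filenames are not shown in this diff, and the packaging module appears later in this section):

const installApplication = require('./lib/install-application') // hypothetical filename
const packageApplication = require('./lib/package-application') // hypothetical filename

packageApplication().then((packagedAppPath) => {
  installApplication(packagedAppPath) // copies the bundle into /Applications, %LOCALAPPDATA%, or /usr/local/share
})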

View File

@ -0,0 +1,24 @@
'use strict'
const childProcess = require('child_process')
const path = require('path')
const CONFIG = require('../config')
module.exports = function () {
const installEnv = Object.assign({}, process.env)
// Set resource path so that apm can load metadata related to Atom.
installEnv.ATOM_RESOURCE_PATH = CONFIG.repositoryRootPath
// Set our target (Electron) version so that node-pre-gyp can download the
// proper binaries.
installEnv.npm_config_target = CONFIG.appMetadata.electronVersion
// Force 32-bit modules on Windows. (Ref.: https://github.com/atom/atom/issues/10450)
if (process.platform === 'win32') {
installEnv.npm_config_target_arch = 'ia32'
}
childProcess.execFileSync(
CONFIG.getApmBinPath(),
['--loglevel=error', 'install'],
{env: installEnv, cwd: CONFIG.repositoryRootPath, stdio: 'inherit'}
)
}
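To summarize the effect of the branches above, here is a rough sketch of the environment handed to apm on a Windows build machine (all values are illustrative, not taken from the diff):

const exampleEnv = Object.assign({}, process.env, {
  ATOM_RESOURCE_PATH: 'C:\\projects\\atom', // repository root (example value)
  npm_config_target: '1.3.5',               // Electron version from the app metadata (example value)
  npm_config_target_arch: 'ia32'            // forced so native modules are built 32-bit on Windows
})
console.log(exampleEnv.npm_config_target_arch) // 'ia32'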

View File

@ -0,0 +1,15 @@
'use strict'
const childProcess = require('child_process')
const path = require('path')
const CONFIG = require('../config')
module.exports = function () {
console.log('Installing script dependencies')
childProcess.execFileSync(
CONFIG.getNpmBinPath(),
['--loglevel=error', 'install'],
{env: process.env, cwd: CONFIG.scriptRootPath}
)
}

View File

@ -0,0 +1,12 @@
const childProcess = require('child_process')
const CONFIG = require('../config.js')
module.exports = function () {
if (process.platform === 'win32') {
// Use START as a way to ignore the error if Atom.exe isn't running
childProcess.execSync(`START taskkill /F /IM ${CONFIG.appMetadata.productName}.exe`)
} else {
childProcess.execSync(`pkill -9 ${CONFIG.appMetadata.productName} || true`)
}
}
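A usage sketch (editorial; the module filename is an assumption, and the commands shown assume productName is "Atom"):

const killRunningAtomInstances = require('./lib/kill-running-atom-instances') // hypothetical filename
killRunningAtomInstances()
// Linux/macOS: pkill -9 Atom || true
// Windows:     START taskkill /F /IM Atom.exe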

View File

@ -0,0 +1,27 @@
'use strict'
const coffeelint = require('coffeelint')
const expandGlobPaths = require('./expand-glob-paths')
const path = require('path')
const readFiles = require('./read-files')
const CONFIG = require('../config')
module.exports = function () {
const globPathsToLint = [
path.join(CONFIG.repositoryRootPath, 'dot-atom/**/*.coffee'),
path.join(CONFIG.repositoryRootPath, 'src/**/*.coffee'),
path.join(CONFIG.repositoryRootPath, 'spec/*.coffee')
]
return expandGlobPaths(globPathsToLint).then(readFiles).then((files) => {
const errors = []
const lintConfiguration = require(path.join(CONFIG.repositoryRootPath, 'coffeelint.json'))
for (let file of files) {
const lintErrors = coffeelint.lint(file.content, lintConfiguration, false)
for (let error of lintErrors) {
errors.push({path: file.path, lineNumber: error.lineNumber, message: error.message, rule: error.rule})
}
}
return errors
})
}

View File

@ -0,0 +1,32 @@
'use strict'
const expandGlobPaths = require('./expand-glob-paths')
const standard = require('standard')
const path = require('path')
const CONFIG = require('../config')
module.exports = function () {
const globPathsToLint = [
path.join(CONFIG.repositoryRootPath, 'exports', '**', '*.js'),
path.join(CONFIG.repositoryRootPath, 'src', '**', '*.js'),
path.join(CONFIG.repositoryRootPath, 'static', '*.js')
]
return expandGlobPaths(globPathsToLint).then((paths) => {
return new Promise((resolve, reject) => {
standard.lintFiles(paths, (error, lintOutput) => {
if (error) {
reject(error)
} else {
const errors = []
for (let result of lintOutput.results) {
for (let message of result.messages) {
errors.push({path: result.filePath, lineNumber: message.line, message: message.message, rule: message.ruleId})
}
}
resolve(errors)
}
})
})
})
}

View File

@ -0,0 +1,64 @@
'use strict'
const csslint = require('csslint').CSSLint
const expandGlobPaths = require('./expand-glob-paths')
const LessCache = require('less-cache')
const path = require('path')
const readFiles = require('./read-files')
const CONFIG = require('../config')
const LESS_CACHE_VERSION = require('less-cache/package.json').version
module.exports = function () {
const globPathsToLint = [
path.join(CONFIG.repositoryRootPath, 'static/**/*.less')
]
const lintOptions = {
'adjoining-classes': false,
'duplicate-background-images': false,
'box-model': false,
'box-sizing': false,
'bulletproof-font-face': false,
'compatible-vendor-prefixes': false,
'display-property-grouping': false,
'duplicate-properties': false,
'fallback-colors': false,
'font-sizes': false,
'gradients': false,
'ids': false,
'important': false,
'known-properties': false,
'order-alphabetical': false,
'outline-none': false,
'overqualified-elements': false,
'regex-selectors': false,
'qualified-headings': false,
'unique-headings': false,
'universal-selector': false,
'vendor-prefix': false
}
for (let rule of csslint.getRules()) {
if (!lintOptions.hasOwnProperty(rule.id)) lintOptions[rule.id] = true
}
const lessCache = new LessCache({
cacheDir: path.join(CONFIG.intermediateAppPath, 'less-compile-cache'),
fallbackDir: path.join(CONFIG.atomHomeDirPath, 'compile-cache', 'prebuild-less', LESS_CACHE_VERSION),
syncCaches: true,
resourcePath: CONFIG.repositoryRootPath,
importPaths: [
path.join(CONFIG.intermediateAppPath, 'static', 'variables'),
path.join(CONFIG.intermediateAppPath, 'static')
]
})
return expandGlobPaths(globPathsToLint).then(readFiles).then((files) => {
const errors = []
for (let file of files) {
const css = lessCache.cssForFile(file.path, file.content)
const result = csslint.verify(css, lintOptions)
for (let message of result.messages) {
errors.push({path: file.path.replace(/\.less$/, '.css'), lineNumber: message.line, message: message.message, rule: message.rule.id})
}
}
return errors
})
}
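The three lint helpers above share one contract: each returns a Promise for an array of {path, lineNumber, message, rule} records. A sketch of a driver that aggregates them (editorial; the filenames and the driver itself are assumptions):

const lintCoffeeScriptPaths = require('./lib/lint-coffee-script-paths') // hypothetical filenames
const lintJavaScriptPaths = require('./lib/lint-java-script-paths')
const lintLessPaths = require('./lib/lint-less-paths')

Promise.all([lintCoffeeScriptPaths(), lintJavaScriptPaths(), lintLessPaths()]).then((results) => {
  const errors = [].concat.apply([], results) // flatten the three result arrays
  for (let error of errors) {
    console.error(`${error.path}:${error.lineNumber} ${error.message} (${error.rule})`)
  }
  process.exit(errors.length > 0 ? 1 : 0)
})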

View File

@ -0,0 +1,179 @@
'use strict'
const assert = require('assert')
const childProcess = require('child_process')
const electronPackager = require('electron-packager')
const fs = require('fs-extra')
const includePathInPackagedApp = require('./include-path-in-packaged-app')
const getLicenseText = require('./get-license-text')
const path = require('path')
const spawnSync = require('./spawn-sync')
const CONFIG = require('../config')
module.exports = function () {
const appName = getAppName()
console.log(`Running electron-packager on ${CONFIG.intermediateAppPath} with app name "${appName}"`)
return runPackager({
'app-bundle-id': 'com.github.atom',
'app-copyright': `Copyright © 2014-${(new Date()).getFullYear()} GitHub, Inc. All rights reserved.`,
'app-version': CONFIG.appMetadata.version,
'arch': process.platform === 'win32' ? 'ia32' : 'x64',
'asar': {unpack: buildAsarUnpackGlobExpression()},
'build-version': CONFIG.appMetadata.version,
'download': {cache: CONFIG.electronDownloadPath},
'dir': CONFIG.intermediateAppPath,
'extend-info': path.join(CONFIG.repositoryRootPath, 'resources', 'mac', 'atom-Info.plist'),
'helper-bundle-id': 'com.github.atom.helper',
'icon': getIcon(),
'name': appName,
'out': CONFIG.buildOutputPath,
'overwrite': true,
'platform': process.platform,
'version': CONFIG.appMetadata.electronVersion,
'version-string': {
'CompanyName': 'GitHub, Inc.',
'FileDescription': 'Atom',
'ProductName': 'Atom'
}
}).then((packagedAppPath) => {
let bundledResourcesPath
if (process.platform === 'darwin') {
bundledResourcesPath = path.join(packagedAppPath, 'Contents', 'Resources')
setAtomHelperVersion(packagedAppPath)
} else if (process.platform === 'linux') {
bundledResourcesPath = path.join(packagedAppPath, 'resources')
chmodNodeFiles(packagedAppPath)
} else {
bundledResourcesPath = path.join(packagedAppPath, 'resources')
}
return copyNonASARResources(packagedAppPath, bundledResourcesPath).then(() => {
console.log(`Application bundle created at ${packagedAppPath}`)
return packagedAppPath
})
})
}
function copyNonASARResources (packagedAppPath, bundledResourcesPath) {
console.log(`Copying non-ASAR resources to ${bundledResourcesPath}`)
fs.copySync(
path.join(CONFIG.repositoryRootPath, 'apm', 'node_modules', 'atom-package-manager'),
path.join(bundledResourcesPath, 'app', 'apm'),
{filter: includePathInPackagedApp}
)
if (process.platform !== 'win32') {
// Existing symlinks on user systems point to an outdated path, so just symlink it to the real location of the apm binary.
// TODO: Change command installer to point to appropriate path and remove this fallback after a few releases.
fs.symlinkSync(path.join('..', '..', 'bin', 'apm'), path.join(bundledResourcesPath, 'app', 'apm', 'node_modules', '.bin', 'apm'))
fs.copySync(path.join(CONFIG.repositoryRootPath, 'atom.sh'), path.join(bundledResourcesPath, 'app', 'atom.sh'))
}
if (process.platform === 'darwin') {
fs.copySync(path.join(CONFIG.repositoryRootPath, 'resources', 'mac', 'file.icns'), path.join(bundledResourcesPath, 'file.icns'))
} else if (process.platform === 'linux') {
fs.copySync(path.join(CONFIG.repositoryRootPath, 'resources', 'app-icons', CONFIG.channel, 'png', '1024.png'), path.join(packagedAppPath, 'atom.png'))
} else if (process.platform === 'win32') {
fs.copySync(path.join('resources', 'win', 'atom.cmd'), path.join(bundledResourcesPath, 'cli', 'atom.cmd'))
fs.copySync(path.join('resources', 'win', 'atom.sh'), path.join(bundledResourcesPath, 'cli', 'atom.sh'))
fs.copySync(path.join('resources', 'win', 'atom.js'), path.join(bundledResourcesPath, 'cli', 'atom.js'))
fs.copySync(path.join('resources', 'win', 'apm.cmd'), path.join(bundledResourcesPath, 'cli', 'apm.cmd'))
fs.copySync(path.join('resources', 'win', 'apm.sh'), path.join(bundledResourcesPath, 'cli', 'apm.sh'))
}
console.log(`Writing LICENSE.md to ${bundledResourcesPath}`)
return getLicenseText().then((licenseText) => {
fs.writeFileSync(path.join(bundledResourcesPath, 'LICENSE.md'), licenseText)
})
}
function setAtomHelperVersion (packagedAppPath) {
const frameworksPath = path.join(packagedAppPath, 'Contents', 'Frameworks')
const helperPListPath = path.join(frameworksPath, 'Atom Helper.app', 'Contents', 'Info.plist')
console.log(`Setting Atom Helper Version for ${helperPListPath}`)
spawnSync('/usr/libexec/PlistBuddy', ['-c', `Add CFBundleVersion string ${CONFIG.appMetadata.version}`, helperPListPath])
spawnSync('/usr/libexec/PlistBuddy', ['-c', `Add CFBundleShortVersionString string ${CONFIG.appMetadata.version}`, helperPListPath])
}
function chmodNodeFiles (packagedAppPath) {
console.log(`Changing permissions for node files in ${packagedAppPath}`)
childProcess.execSync(`find "${packagedAppPath}" -type f -name '*.node' -exec chmod a-x {} \\;`)
}
function buildAsarUnpackGlobExpression () {
const unpack = [
'*.node',
'ctags-config',
'ctags-darwin',
'ctags-linux',
'ctags-win32.exe',
path.join('**', 'node_modules', 'spellchecker', '**'),
path.join('**', 'resources', 'atom.png')
]
return `{${unpack.join(',')}}`
}
function getAppName () {
if (process.platform === 'darwin') {
return CONFIG.channel === 'beta' ? 'Atom Beta' : 'Atom'
} else {
return 'atom'
}
}
function getIcon () {
switch (process.platform) {
case 'darwin':
return path.join(CONFIG.repositoryRootPath, 'resources', 'app-icons', CONFIG.channel, 'atom.icns')
case 'linux':
// Don't pass an icon, as the dock/window list icon is set via the icon
// option in the BrowserWindow constructor in atom-window.coffee.
return null
default:
return path.join(CONFIG.repositoryRootPath, 'resources', 'app-icons', CONFIG.channel, 'atom.ico')
}
}
function runPackager (options) {
return new Promise((resolve, reject) => {
electronPackager(options, (err, packageOutputDirPaths) => {
if (err) {
reject(err)
} else {
assert(packageOutputDirPaths.length === 1, 'Generated more than one electron application!')
const packagedAppPath = renamePackagedAppDir(packageOutputDirPaths[0])
resolve(packagedAppPath)
}
})
})
}
function renamePackagedAppDir (packageOutputDirPath) {
let packagedAppPath
if (process.platform === 'darwin') {
const appBundleName = getAppName() + '.app'
packagedAppPath = path.join(CONFIG.buildOutputPath, appBundleName)
if (fs.existsSync(packagedAppPath)) fs.removeSync(packagedAppPath)
fs.renameSync(path.join(packageOutputDirPath, appBundleName), packagedAppPath)
} else if (process.platform === 'linux') {
const appName = CONFIG.channel === 'beta' ? 'atom-beta' : 'atom'
let architecture
if (process.arch === 'ia32') {
architecture = 'i386'
} else if (process.arch === 'x64') {
architecture = 'amd64'
} else {
architecture = process.arch
}
packagedAppPath = path.join(CONFIG.buildOutputPath, `${appName}-${CONFIG.appMetadata.version}-${architecture}`)
if (fs.existsSync(packagedAppPath)) fs.removeSync(packagedAppPath)
fs.renameSync(packageOutputDirPath, packagedAppPath)
} else {
const appName = CONFIG.channel === 'beta' ? 'Atom Beta' : 'Atom'
packagedAppPath = path.join(CONFIG.buildOutputPath, appName)
if (fs.existsSync(packagedAppPath)) fs.removeSync(packagedAppPath)
fs.renameSync(packageOutputDirPath, packagedAppPath)
}
return packagedAppPath
}
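A minimal usage sketch of the packaging module above (editorial; the require path is an assumption since the filename is not shown):

const packageApplication = require('./lib/package-application') // hypothetical filename

packageApplication().then((packagedAppPath) => {
  console.log(`Packaged application is at ${packagedAppPath}`)
}).catch((error) => {
  console.error(error)
  process.exit(1)
})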

View File

@ -0,0 +1,78 @@
'use strict'
const fs = require('fs')
const glob = require('glob')
const path = require('path')
const LessCache = require('less-cache')
const CONFIG = require('../config')
const LESS_CACHE_VERSION = require('less-cache/package.json').version
const FALLBACK_VARIABLE_IMPORTS = '@import "variables/ui-variables";\n@import "variables/syntax-variables";\n'
module.exports = function () {
const cacheDirPath = path.join(CONFIG.intermediateAppPath, 'less-compile-cache')
console.log(`Generating pre-built less cache in ${cacheDirPath}`)
// Group bundled packages into UI themes, syntax themes, and non-theme packages
const uiThemes = []
const syntaxThemes = []
const nonThemePackages = []
for (let packageName in CONFIG.appMetadata.packageDependencies) {
const packageMetadata = require(path.join(CONFIG.intermediateAppPath, 'node_modules', packageName, 'package.json'))
if (packageMetadata.theme === 'ui') {
uiThemes.push(packageName)
} else if (packageMetadata.theme === 'syntax') {
syntaxThemes.push(packageName)
} else {
nonThemePackages.push(packageName)
}
}
// Warm cache for every combination of the default UI and syntax themes,
// because themes assign variables which may be used in any style sheet.
for (let uiTheme of uiThemes) {
for (let syntaxTheme of syntaxThemes) {
// Build a LessCache instance with import paths based on the current theme combination
const lessCache = new LessCache({
cacheDir: cacheDirPath,
fallbackDir: path.join(CONFIG.atomHomeDirPath, 'compile-cache', 'prebuild-less', LESS_CACHE_VERSION),
syncCaches: true,
resourcePath: CONFIG.intermediateAppPath,
importPaths: [
path.join(CONFIG.intermediateAppPath, 'node_modules', syntaxTheme, 'styles'),
path.join(CONFIG.intermediateAppPath, 'node_modules', uiTheme, 'styles'),
path.join(CONFIG.intermediateAppPath, 'static', 'variables'),
path.join(CONFIG.intermediateAppPath, 'static')
]
})
function cacheCompiledCSS (lessFilePath, importFallbackVariables) {
let lessSource = fs.readFileSync(lessFilePath, 'utf8')
if (importFallbackVariables) {
lessSource = FALLBACK_VARIABLE_IMPORTS + lessSource
}
lessCache.cssForFile(lessFilePath, lessSource)
}
// Cache all styles in static; don't append variable imports
for (let lessFilePath of glob.sync(path.join(CONFIG.intermediateAppPath, 'static', '**', '*.less'))) {
cacheCompiledCSS(lessFilePath, false)
}
// Cache styles for all bundled non-theme packages
for (let nonThemePackage of nonThemePackages) {
for (let lessFilePath of glob.sync(path.join(CONFIG.intermediateAppPath, 'node_modules', nonThemePackage, '**', '*.less'))) {
cacheCompiledCSS(lessFilePath, true)
}
}
// Cache styles for this UI theme
const uiThemeMainPath = path.join(CONFIG.intermediateAppPath, 'node_modules', uiTheme, 'index.less')
cacheCompiledCSS(uiThemeMainPath, true)
// Cache styles for this syntax theme
const syntaxThemeMainPath = path.join(CONFIG.intermediateAppPath, 'node_modules', syntaxTheme, 'index.less')
cacheCompiledCSS(syntaxThemeMainPath, true)
}
}
}
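To make the nested loops above concrete: each UI/syntax theme pair gets its own LessCache, so the warm-up work grows with the product of the two lists. The theme names below are illustrative examples only.

const exampleUiThemes = ['one-dark-ui', 'one-light-ui']
const exampleSyntaxThemes = ['one-dark-syntax', 'one-light-syntax']
console.log(`Warming ${exampleUiThemes.length * exampleSyntaxThemes.length} theme combinations`) // Warming 4 theme combinations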

19
script/lib/read-files.js Normal file
View File

@ -0,0 +1,19 @@
'use strict'
const fs = require('fs')
module.exports = function (paths) {
return Promise.all(paths.map(readFile))
}
function readFile (path) {
return new Promise((resolve, reject) => {
fs.readFile(path, 'utf8', (error, content) => {
if (error) {
reject(error)
} else {
resolve({path, content})
}
})
})
}
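A usage sketch (editorial; the require path assumes a caller in the script/ directory):

const readFiles = require('./lib/read-files') // assumed caller location
readFiles(['package.json', 'README.md']).then((files) => {
  for (let file of files) {
    console.log(`${file.path}: ${file.content.length} characters`)
  }
})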

20
script/lib/spawn-sync.js Normal file
View File

@ -0,0 +1,20 @@
// This file exports a function that has the same interface as
// `spawnSync`, but it throws if there's an error while executing
// the supplied command or if the exit code is not 0. This is similar to what
// `execSync` does, but we want to use `spawnSync` because it provides automatic
// escaping for the supplied arguments.
const childProcess = require('child_process')
module.exports = function () {
const result = childProcess.spawnSync.apply(childProcess, arguments)
if (result.error) {
throw result.error
} else if (result.status !== 0) {
if (result.stdout) console.error(result.stdout.toString())
if (result.stderr) console.error(result.stderr.toString())
throw new Error(`Command ${result.args.join(' ')} exited with code "${result.status}"`)
} else {
return result
}
}
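A usage sketch showing the throw-on-failure contract (editorial; the require path assumes a caller in the script/ directory):

const spawnSync = require('./lib/spawn-sync') // assumed caller location
const result = spawnSync('git', ['rev-parse', 'HEAD']) // throws if the command fails or exits non-zero
console.log(result.stdout.toString().trim())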

View File

@ -0,0 +1,41 @@
'use strict'
const CompileCache = require('../../src/compile-cache')
const fs = require('fs')
const glob = require('glob')
const path = require('path')
const CONFIG = require('../config')
const BABEL_OPTIONS = require('../../static/babelrc.json')
const BABEL_PREFIXES = [
"'use babel'",
'"use babel"',
'/** @babel */',
'/* @flow */'
]
const PREFIX_LENGTH = Math.max.apply(null, BABEL_PREFIXES.map(prefix => prefix.length))
const BUFFER = Buffer(PREFIX_LENGTH)
module.exports = function () {
console.log(`Transpiling Babel paths in ${CONFIG.intermediateAppPath}`)
for (let path of getPathsToTranspile()) {
transpileBabelPath(path)
}
}
function getPathsToTranspile () {
let paths = []
paths = paths.concat(glob.sync(path.join(CONFIG.intermediateAppPath, 'exports', '**', '*.js')))
paths = paths.concat(glob.sync(path.join(CONFIG.intermediateAppPath, 'src', '**', '*.js')))
for (let packageName of Object.keys(CONFIG.appMetadata.packageDependencies)) {
paths = paths.concat(glob.sync(
path.join(CONFIG.intermediateAppPath, 'node_modules', packageName, '**', '*.js'),
{ignore: path.join(CONFIG.intermediateAppPath, 'node_modules', packageName, 'spec', '**', '*.js')}
))
}
return paths
}
function transpileBabelPath (path) {
fs.writeFileSync(path, CompileCache.addPathToCache(path, CONFIG.atomHomeDirPath))
}
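A usage sketch (editorial; the require path is an assumption since the filename is not shown). Calling the module rewrites every matching .js file under the intermediate app path in place:

const transpileBabelPaths = require('./lib/transpile-babel-paths') // hypothetical filename
transpileBabelPaths()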

Some files were not shown because too many files have changed in this diff.