Merge branch 'stable' into beta

Conflicts:
	package.json
Max Brunsfeld 2015-12-16 10:21:29 -08:00
commit f670dd8ff2
8 changed files with 154 additions and 57 deletions


@@ -18,7 +18,7 @@
     "atom-keymap": "^6.2.0",
     "babel-core": "^5.8.21",
     "bootstrap": "^3.3.4",
-    "cached-run-in-this-context": "0.4.0",
+    "cached-run-in-this-context": "0.4.1",
     "clear-cut": "^2.0.1",
     "coffee-script": "1.8.0",
     "color": "^0.7.3",


@@ -9,61 +9,71 @@ describe "FileSystemBlobStore", ->
     blobStore = FileSystemBlobStore.load(storageDirectory)
   it "is empty when the file doesn't exist", ->
-    expect(blobStore.get("foo")).toBeUndefined()
-    expect(blobStore.get("bar")).toBeUndefined()
+    expect(blobStore.get("foo", "invalidation-key-1")).toBeUndefined()
+    expect(blobStore.get("bar", "invalidation-key-2")).toBeUndefined()
   it "allows to read and write buffers from/to memory without persisting them", ->
-    blobStore.set("foo", new Buffer("foo"))
-    blobStore.set("bar", new Buffer("bar"))
+    blobStore.set("foo", "invalidation-key-1", new Buffer("foo"))
+    blobStore.set("bar", "invalidation-key-2", new Buffer("bar"))
-    expect(blobStore.get("foo")).toEqual(new Buffer("foo"))
-    expect(blobStore.get("bar")).toEqual(new Buffer("bar"))
+    expect(blobStore.get("foo", "invalidation-key-1")).toEqual(new Buffer("foo"))
+    expect(blobStore.get("bar", "invalidation-key-2")).toEqual(new Buffer("bar"))
+    expect(blobStore.get("foo", "unexisting-key")).toBeUndefined()
+    expect(blobStore.get("bar", "unexisting-key")).toBeUndefined()
   it "persists buffers when saved and retrieves them on load, giving priority to in-memory ones", ->
-    blobStore.set("foo", new Buffer("foo"))
-    blobStore.set("bar", new Buffer("bar"))
+    blobStore.set("foo", "invalidation-key-1", new Buffer("foo"))
+    blobStore.set("bar", "invalidation-key-2", new Buffer("bar"))
     blobStore.save()
     blobStore = FileSystemBlobStore.load(storageDirectory)
-    expect(blobStore.get("foo")).toEqual(new Buffer("foo"))
-    expect(blobStore.get("bar")).toEqual(new Buffer("bar"))
+    expect(blobStore.get("foo", "invalidation-key-1")).toEqual(new Buffer("foo"))
+    expect(blobStore.get("bar", "invalidation-key-2")).toEqual(new Buffer("bar"))
+    expect(blobStore.get("foo", "unexisting-key")).toBeUndefined()
+    expect(blobStore.get("bar", "unexisting-key")).toBeUndefined()
-    blobStore.set("foo", new Buffer("changed"))
+    blobStore.set("foo", "new-key", new Buffer("changed"))
-    expect(blobStore.get("foo")).toEqual(new Buffer("changed"))
+    expect(blobStore.get("foo", "new-key")).toEqual(new Buffer("changed"))
+    expect(blobStore.get("foo", "invalidation-key-1")).toBeUndefined()
   it "persists both in-memory and previously stored buffers when saved", ->
-    blobStore.set("foo", new Buffer("foo"))
-    blobStore.set("bar", new Buffer("bar"))
+    blobStore.set("foo", "invalidation-key-1", new Buffer("foo"))
+    blobStore.set("bar", "invalidation-key-2", new Buffer("bar"))
     blobStore.save()
     blobStore = FileSystemBlobStore.load(storageDirectory)
-    blobStore.set("bar", new Buffer("changed"))
-    blobStore.set("qux", new Buffer("qux"))
+    blobStore.set("bar", "invalidation-key-3", new Buffer("changed"))
+    blobStore.set("qux", "invalidation-key-4", new Buffer("qux"))
     blobStore.save()
     blobStore = FileSystemBlobStore.load(storageDirectory)
-    expect(blobStore.get("foo")).toEqual(new Buffer("foo"))
-    expect(blobStore.get("bar")).toEqual(new Buffer("changed"))
-    expect(blobStore.get("qux")).toEqual(new Buffer("qux"))
+    expect(blobStore.get("foo", "invalidation-key-1")).toEqual(new Buffer("foo"))
+    expect(blobStore.get("bar", "invalidation-key-3")).toEqual(new Buffer("changed"))
+    expect(blobStore.get("qux", "invalidation-key-4")).toEqual(new Buffer("qux"))
+    expect(blobStore.get("foo", "unexisting-key")).toBeUndefined()
+    expect(blobStore.get("bar", "invalidation-key-2")).toBeUndefined()
+    expect(blobStore.get("qux", "unexisting-key")).toBeUndefined()
   it "allows to delete keys from both memory and stored buffers", ->
-    blobStore.set("a", new Buffer("a"))
-    blobStore.set("b", new Buffer("b"))
+    blobStore.set("a", "invalidation-key-1", new Buffer("a"))
+    blobStore.set("b", "invalidation-key-2", new Buffer("b"))
     blobStore.save()
     blobStore = FileSystemBlobStore.load(storageDirectory)
-    blobStore.set("b", new Buffer("b"))
-    blobStore.set("c", new Buffer("c"))
+    blobStore.set("b", "invalidation-key-3", new Buffer("b"))
+    blobStore.set("c", "invalidation-key-4", new Buffer("c"))
     blobStore.delete("b")
     blobStore.delete("c")
     blobStore.save()
     blobStore = FileSystemBlobStore.load(storageDirectory)
-    expect(blobStore.get("a")).toEqual(new Buffer("a"))
-    expect(blobStore.get("b")).toBeUndefined()
-    expect(blobStore.get("c")).toBeUndefined()
+    expect(blobStore.get("a", "invalidation-key-1")).toEqual(new Buffer("a"))
+    expect(blobStore.get("b", "invalidation-key-2")).toBeUndefined()
+    expect(blobStore.get("b", "invalidation-key-3")).toBeUndefined()
+    expect(blobStore.get("c", "invalidation-key-4")).toBeUndefined()

spec/fixtures/native-cache/file-4.js (new file)

@@ -0,0 +1 @@
+module.exports = function () { return "file-4" }


@@ -1,3 +1,7 @@
fs = require 'fs'
path = require 'path'
Module = require 'module'
describe "NativeCompileCache", ->
nativeCompileCache = require '../src/native-compile-cache'
[fakeCacheStore, cachedFiles] = []
@@ -5,39 +9,92 @@ describe "NativeCompileCache", ->
beforeEach ->
cachedFiles = []
fakeCacheStore = jasmine.createSpyObj("cache store", ["set", "get", "has", "delete"])
fakeCacheStore.has.andCallFake (cacheKey, invalidationKey) ->
fakeCacheStore.get(cacheKey, invalidationKey)?
fakeCacheStore.get.andCallFake (cacheKey, invalidationKey) ->
for entry in cachedFiles by -1
continue if entry.cacheKey isnt cacheKey
continue if entry.invalidationKey isnt invalidationKey
return entry.cacheBuffer
return
fakeCacheStore.set.andCallFake (cacheKey, invalidationKey, cacheBuffer) ->
cachedFiles.push({cacheKey, invalidationKey, cacheBuffer})
nativeCompileCache.setCacheStore(fakeCacheStore)
nativeCompileCache.setV8Version("a-v8-version")
nativeCompileCache.install()
it "writes and reads from the cache storage when requiring files", ->
fakeCacheStore.has.andReturn(false)
fakeCacheStore.set.andCallFake (filename, cacheBuffer) ->
cachedFiles.push({filename, cacheBuffer})
fn1 = require('./fixtures/native-cache/file-1')
fn2 = require('./fixtures/native-cache/file-2')
expect(cachedFiles.length).toBe(2)
expect(cachedFiles[0].filename).toBe(require.resolve('./fixtures/native-cache/file-1'))
expect(cachedFiles[0].cacheKey).toBe(require.resolve('./fixtures/native-cache/file-1'))
expect(cachedFiles[0].cacheBuffer).toBeInstanceOf(Uint8Array)
expect(cachedFiles[0].cacheBuffer.length).toBeGreaterThan(0)
expect(fn1()).toBe(1)
expect(cachedFiles[1].filename).toBe(require.resolve('./fixtures/native-cache/file-2'))
expect(cachedFiles[1].cacheKey).toBe(require.resolve('./fixtures/native-cache/file-2'))
expect(cachedFiles[1].cacheBuffer).toBeInstanceOf(Uint8Array)
expect(cachedFiles[1].cacheBuffer.length).toBeGreaterThan(0)
expect(fn2()).toBe(2)
fakeCacheStore.has.andReturn(true)
fakeCacheStore.get.andReturn(cachedFiles[0].cacheBuffer)
fakeCacheStore.set.reset()
delete Module._cache[require.resolve('./fixtures/native-cache/file-1')]
fn1 = require('./fixtures/native-cache/file-1')
expect(fakeCacheStore.set).not.toHaveBeenCalled()
expect(cachedFiles.length).toBe(2)
expect(fn1()).toBe(1)
it "deletes previously cached code when the cache is not valid", ->
describe "when v8 version changes", ->
it "updates the cache of previously required files", ->
nativeCompileCache.setV8Version("version-1")
fn4 = require('./fixtures/native-cache/file-4')
expect(cachedFiles.length).toBe(1)
expect(cachedFiles[0].cacheKey).toBe(require.resolve('./fixtures/native-cache/file-4'))
expect(cachedFiles[0].cacheBuffer).toBeInstanceOf(Uint8Array)
expect(cachedFiles[0].cacheBuffer.length).toBeGreaterThan(0)
expect(fn4()).toBe("file-4")
nativeCompileCache.setV8Version("version-2")
delete Module._cache[require.resolve('./fixtures/native-cache/file-4')]
fn4 = require('./fixtures/native-cache/file-4')
expect(cachedFiles.length).toBe(2)
expect(cachedFiles[1].cacheKey).toBe(require.resolve('./fixtures/native-cache/file-4'))
expect(cachedFiles[1].invalidationKey).not.toBe(cachedFiles[0].invalidationKey)
expect(cachedFiles[1].cacheBuffer).toBeInstanceOf(Uint8Array)
expect(cachedFiles[1].cacheBuffer.length).toBeGreaterThan(0)
describe "when a previously required and cached file changes", ->
beforeEach ->
fs.writeFileSync path.resolve('./spec/fixtures/native-cache/file-5'), """
module.exports = function () { return "file-5" }
"""
afterEach ->
fs.unlinkSync path.resolve('./spec/fixtures/native-cache/file-5')
it "removes it from the store and re-inserts it with the new cache", ->
fn5 = require('./fixtures/native-cache/file-5')
expect(cachedFiles.length).toBe(1)
expect(cachedFiles[0].cacheKey).toBe(require.resolve('./fixtures/native-cache/file-5'))
expect(cachedFiles[0].cacheBuffer).toBeInstanceOf(Uint8Array)
expect(cachedFiles[0].cacheBuffer.length).toBeGreaterThan(0)
expect(fn5()).toBe("file-5")
delete Module._cache[require.resolve('./fixtures/native-cache/file-5')]
fs.appendFileSync(require.resolve('./fixtures/native-cache/file-5'), "\n\n")
fn5 = require('./fixtures/native-cache/file-5')
expect(cachedFiles.length).toBe(2)
expect(cachedFiles[1].cacheKey).toBe(require.resolve('./fixtures/native-cache/file-5'))
expect(cachedFiles[1].invalidationKey).not.toBe(cachedFiles[0].invalidationKey)
expect(cachedFiles[1].cacheBuffer).toBeInstanceOf(Uint8Array)
expect(cachedFiles[1].cacheBuffer.length).toBeGreaterThan(0)
it "deletes previously cached code when the cache is an invalid file", ->
fakeCacheStore.has.andReturn(true)
fakeCacheStore.get.andCallFake -> new Buffer("an invalid cache")
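
The fake store in this spec doubles as a description of the protocol NativeCompileCache now expects from its cache store: has(cacheKey, invalidationKey), get(cacheKey, invalidationKey), set(cacheKey, invalidationKey, cacheBuffer) and delete(cacheKey). A minimal in-memory store satisfying that protocol might look like this (illustrative sketch, not part of the change):

class InMemoryCacheStore {
  constructor () {
    this.entries = new Map()  // cacheKey -> {invalidationKey, cacheBuffer}
  }

  has (cacheKey, invalidationKey) {
    let entry = this.entries.get(cacheKey)
    return entry != null && entry.invalidationKey === invalidationKey
  }

  get (cacheKey, invalidationKey) {
    if (this.has(cacheKey, invalidationKey)) {
      return this.entries.get(cacheKey).cacheBuffer
    }
  }

  set (cacheKey, invalidationKey, cacheBuffer) {
    this.entries.set(cacheKey, {invalidationKey, cacheBuffer})
  }

  delete (cacheKey) {
    this.entries.delete(cacheKey)
  }
}

// e.g. nativeCompileCache.setCacheStore(new InMemoryCacheStore())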


@@ -1,5 +1,10 @@
 'use strict'
+// For now, we're not using babel or ES6 features like `let` and `const` in
+// this file, because `apm` requires this file directly in order to pre-warm
+// Atom's compile-cache when installing or updating packages, using an older
+// version of node.js
 var path = require('path')
 var fs = require('fs-plus')
 var CSON = null
@@ -159,8 +164,7 @@ require('source-map-support').install({
 })
 var prepareStackTraceWithSourceMapping = Error.prepareStackTrace
-let prepareStackTrace = prepareStackTraceWithSourceMapping
+var prepareStackTrace = prepareStackTraceWithSourceMapping
 function prepareStackTraceWithRawStackAssignment (error, frames) {
   if (error.rawStack) { // avoid infinite recursion


@@ -13,8 +13,10 @@ class FileSystemBlobStore {
   constructor (directory) {
     this.inMemoryBlobs = new Map()
+    this.invalidationKeys = {}
     this.blobFilename = path.join(directory, 'BLOB')
     this.blobMapFilename = path.join(directory, 'MAP')
+    this.invalidationKeysFilename = path.join(directory, 'INVKEYS')
     this.lockFilename = path.join(directory, 'LOCK')
     this.storedBlob = new Buffer(0)
     this.storedBlobMap = {}
@@ -27,14 +29,19 @@ class FileSystemBlobStore {
     if (!fs.existsSync(this.blobFilename)) {
       return
     }
+    if (!fs.existsSync(this.invalidationKeysFilename)) {
+      return
+    }
     this.storedBlob = fs.readFileSync(this.blobFilename)
     this.storedBlobMap = JSON.parse(fs.readFileSync(this.blobMapFilename))
+    this.invalidationKeys = JSON.parse(fs.readFileSync(this.invalidationKeysFilename))
   }
   save () {
     let dump = this.getDump()
     let blobToStore = Buffer.concat(dump[0])
     let mapToStore = JSON.stringify(dump[1])
+    let invalidationKeysToStore = JSON.stringify(this.invalidationKeys)
     let acquiredLock = false
     try {
@@ -43,6 +50,7 @@
       fs.writeFileSync(this.blobFilename, blobToStore)
       fs.writeFileSync(this.blobMapFilename, mapToStore)
+      fs.writeFileSync(this.invalidationKeysFilename, invalidationKeysToStore)
     } catch (error) {
       // Swallow the exception silently only if we fail to acquire the lock.
       if (error.code !== 'EEXIST') {
@@ -55,15 +63,20 @@
     }
   }
-  has (key) {
-    return this.inMemoryBlobs.hasOwnProperty(key) || this.storedBlobMap.hasOwnProperty(key)
+  has (key, invalidationKey) {
+    let containsKey = this.inMemoryBlobs.has(key) || this.storedBlobMap.hasOwnProperty(key)
+    let isValid = this.invalidationKeys[key] === invalidationKey
+    return containsKey && isValid
   }
-  get (key) {
-    return this.getFromMemory(key) || this.getFromStorage(key)
+  get (key, invalidationKey) {
+    if (this.has(key, invalidationKey)) {
+      return this.getFromMemory(key) || this.getFromStorage(key)
+    }
   }
-  set (key, buffer) {
+  set (key, invalidationKey, buffer) {
+    this.invalidationKeys[key] = invalidationKey
     return this.inMemoryBlobs.set(key, buffer)
   }


@@ -3,6 +3,11 @@
 const Module = require('module')
 const path = require('path')
 const cachedVm = require('cached-run-in-this-context')
+const crypto = require('crypto')
+function computeHash (contents) {
+  return crypto.createHash('sha1').update(contents, 'utf8').digest('hex')
+}
 class NativeCompileCache {
   constructor () {
@@ -14,6 +19,10 @@ class NativeCompileCache {
     this.cacheStore = store
   }
+  setV8Version (v8Version) {
+    this.v8Version = v8Version.toString()
+  }
   install () {
     this.savePreviousModuleCompile()
     this.overrideModuleCompile()
@@ -28,20 +37,20 @@
   }
   overrideModuleCompile () {
-    let cacheStore = this.cacheStore
+    let self = this
     let resolvedArgv = null
     // Here we override Node's module.js
     // (https://github.com/atom/node/blob/atom/lib/module.js#L378), changing
     // only the bits that affect compilation in order to use the cached one.
     Module.prototype._compile = function (content, filename) {
-      let self = this
+      let moduleSelf = this
       // remove shebang
       content = content.replace(/^\#\!.*/, '')
       function require (path) {
-        return self.require(path)
+        return moduleSelf.require(path)
       }
       require.resolve = function (request) {
-        return Module._resolveFilename(request, self)
+        return Module._resolveFilename(request, moduleSelf)
       }
       require.main = process.mainModule
@@ -54,18 +63,20 @@
       // create wrapper function
       let wrapper = Module.wrap(content)
+      let cacheKey = filename
+      let invalidationKey = computeHash(wrapper + self.v8Version)
       let compiledWrapper = null
-      if (cacheStore.has(filename)) {
-        let buffer = cacheStore.get(filename)
+      if (self.cacheStore.has(cacheKey, invalidationKey)) {
+        let buffer = self.cacheStore.get(cacheKey, invalidationKey)
         let compilationResult = cachedVm.runInThisContextCached(wrapper, filename, buffer)
         compiledWrapper = compilationResult.result
         if (compilationResult.wasRejected) {
-          cacheStore.delete(filename)
+          self.cacheStore.delete(cacheKey)
         }
       } else {
         let compilationResult = cachedVm.runInThisContext(wrapper, filename)
         if (compilationResult.cacheBuffer) {
-          cacheStore.set(filename, compilationResult.cacheBuffer)
+          self.cacheStore.set(cacheKey, invalidationKey, compilationResult.cacheBuffer)
         }
         compiledWrapper = compilationResult.result
       }
@@ -88,8 +99,8 @@
           global.v8debug.Debug.setBreakPoint(compiledWrapper, 0, 0)
         }
       }
-      let args = [self.exports, require, self, filename, dirname, process, global]
-      return compiledWrapper.apply(self.exports, args)
+      let args = [moduleSelf.exports, require, moduleSelf, filename, dirname, process, global]
+      return compiledWrapper.apply(moduleSelf.exports, args)
     }
   }
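
The core idea in the hunks above is that the cache key stays the module's filename while validity is carried separately by an invalidation key, a SHA-1 of the wrapped source concatenated with the V8 version. A small sketch of that derivation; the wrapper string below is only a stand-in for the output of Module.wrap(content):

const crypto = require('crypto')

function computeHash (contents) {
  return crypto.createHash('sha1').update(contents, 'utf8').digest('hex')
}

// Stand-in for the wrapped module source produced by Module.wrap(content).
const wrapper = '(function (exports, require, module, __filename, __dirname) { /* ... */ })'

// Same source and same V8 version yield the same key; changing either one
// produces a different key, so stale compiled code is treated as a cache miss.
const currentKey = computeHash(wrapper + process.versions.v8)
const otherKey = computeHash(wrapper + 'some-other-v8-version')
console.log(currentKey === otherKey)  // => false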


@@ -19,6 +19,7 @@
     path.join(process.env.ATOM_HOME, 'blob-store/')
   )
   NativeCompileCache.setCacheStore(blobStore)
+  NativeCompileCache.setV8Version(process.versions.v8)
   NativeCompileCache.install()
   // Normalize to make sure drive letter case is consistent on Windows
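
Putting it together, the startup code above loads a FileSystemBlobStore from ATOM_HOME, hands it to NativeCompileCache, tells the cache which V8 it is running on, and installs the Module.prototype._compile override. A condensed wiring sketch, with the require paths assumed:

const path = require('path')
const FileSystemBlobStore = require('./src/file-system-blob-store')  // path assumed
const NativeCompileCache = require('./src/native-compile-cache')     // path assumed

const blobStore = FileSystemBlobStore.load(
  path.join(process.env.ATOM_HOME, 'blob-store/')
)
NativeCompileCache.setCacheStore(blobStore)
NativeCompileCache.setV8Version(process.versions.v8)
NativeCompileCache.install()
// From here on, require() of plain JavaScript files goes through the cached
// compile path, and entries persist across restarts once blobStore.save() runs.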