Force UTC at process level

issues #6406 #6399
- all dates are stored as UTC with this commit
- use moment.tz.setDefault('UTC')
- add migration file to recalculate local datetimes to UTC
- store all dates in same format into our three supported databases
- add option to remember migrations inside settings (core)
- support DST offset for migration
- ensure we force UTC in test env
- run whole migration as transaction
- extend: Settings.findOne function
This commit is contained in:
kirrg001 2016-05-17 12:01:54 +02:00
parent 38cdcfd3d6
commit ec176c243a
19 changed files with 584 additions and 41 deletions

View File

@ -176,7 +176,8 @@ var _ = require('lodash'),
ui: 'bdd', ui: 'bdd',
reporter: grunt.option('reporter') || 'spec', reporter: grunt.option('reporter') || 'spec',
timeout: '15000', timeout: '15000',
save: grunt.option('reporter-output') save: grunt.option('reporter-output'),
require: ['core/server/overrides']
}, },
// #### All Unit tests // #### All Unit tests

View File

@ -1,5 +1,5 @@
var knex = require('knex'), var knex = require('knex'),
config = require('../../config'), config = require('../../config'),
dbConfig = config.database, dbConfig = config.database,
knexInstance; knexInstance;
@ -7,7 +7,11 @@ function configure(dbConfig) {
var client = dbConfig.client, var client = dbConfig.client,
pg; pg;
if (client === 'pg' || client === 'postgres' || client === 'postgresql') { dbConfig.isPostgreSQL = function () {
return client === 'pg' || client === 'postgres' || client === 'postgresql';
};
if (dbConfig.isPostgreSQL()) {
try { try {
pg = require('pg'); pg = require('pg');
} catch (e) { } catch (e) {
@ -20,12 +24,26 @@ function configure(dbConfig) {
pg.types.setTypeParser(20, function (val) { pg.types.setTypeParser(20, function (val) {
return val === null ? null : parseInt(val, 10); return val === null ? null : parseInt(val, 10);
}); });
// https://github.com/tgriesser/knex/issues/97
// this sets the timezone to UTC only for the connection!
dbConfig.pool = {
afterCreate: function (connection, callback) {
connection.query('set timezone=\'UTC\'', function (err) {
callback(err, connection);
});
}
};
} }
if (client === 'sqlite3') { if (client === 'sqlite3') {
dbConfig.useNullAsDefault = dbConfig.useNullAsDefault || false; dbConfig.useNullAsDefault = dbConfig.useNullAsDefault || false;
} }
if (client === 'mysql') {
dbConfig.connection.timezone = 'UTC';
}
return dbConfig; return dbConfig;
} }

View File

@ -0,0 +1 @@
module.exports = [];

View File

@ -0,0 +1,209 @@
var config = require('../../../../config'),
models = require(config.paths.corePath + '/server/models'),
sequence = require(config.paths.corePath + '/server/utils/sequence'),
moment = require('moment'),
_ = require('lodash'),
Promise = require('bluebird'),
messagePrefix = 'Transforming dates to UTC: ',
settingsKey = '006/01',
_private = {};
_private.getTZOffset = function getTZOffset(date) {
return date.getTimezoneOffset();
};
_private.getTZOffsetMax = function getTZOffsetMax() {
return Math.max(Math.abs(new Date('2015-07-01').getTimezoneOffset()), Math.abs(new Date('2015-01-01').getTimezoneOffset()));
};
_private.addOffset = function addOffset(date) {
if (_private.noOffset) {
return moment(date).toDate();
}
return moment(date).add(_private.getTZOffset(date), 'minutes').toDate();
};
/**
* postgres: stores dates with offset, so it's enough to force timezone UTC in the db connection (see data/db/connection.js)
* sqlite: stores UTC timestamps, but we will normalize the format to YYYY-MM-DD HH:mm:ss
*/
module.exports = function transformDatesIntoUTC(options, logger) {
var ServerTimezoneOffset = _private.getTZOffsetMax(),
settingsMigrations = null;
return models.Base.transaction(function (transaction) {
options.transacting = transaction;
// will ensure updated_at fields will not be updated, we take them from the original models
options.importing = true;
options.context = {internal: true};
return sequence([
function databaseCheck() {
if (ServerTimezoneOffset === 0) {
return Promise.reject(new Error('skip'));
}
if (config.database.isPostgreSQL()) {
return Promise.reject(new Error('skip'));
}
if (config.database.client === 'sqlite3') {
_private.noOffset = true;
} else {
_private.noOffset = false;
}
logger.info(messagePrefix + '(could take a while)...');
return Promise.resolve();
},
function checkIfMigrationAlreadyRan() {
return models.Settings.findOne({key: 'migrations'}, options)
.then(function (result) {
try {
settingsMigrations = JSON.parse(result.attributes.value) || {};
} catch (err) {
return Promise.reject(err);
}
// CASE: migration ran already
if (settingsMigrations.hasOwnProperty(settingsKey)) {
return Promise.reject(new Error('skip'));
}
return Promise.resolve();
});
},
function updatePosts() {
return models.Post.findAll(options).then(function (result) {
if (result.models.length === 0) {
logger.warn(messagePrefix + 'No Posts found');
return;
}
return Promise.mapSeries(result.models, function mapper(post) {
if (post.get('published_at')) {
post.set('published_at', _private.addOffset(post.get('published_at')));
}
if (post.get('updated_at')) {
post.set('updated_at', _private.addOffset(post.get('updated_at')));
}
post.set('created_at', _private.addOffset(post.get('created_at')));
return models.Post.edit(post.toJSON(), _.merge({}, options, {id: post.get('id')}));
}).then(function () {
logger.info(messagePrefix + 'Updated datetime fields for Posts');
});
});
},
function updateUsers() {
return models.User.findAll(options).then(function (result) {
if (result.models.length === 0) {
logger.warn(messagePrefix + 'No Users found');
return;
}
return Promise.mapSeries(result.models, function mapper(user) {
if (user.get('last_login')) {
user.set('last_login', _private.addOffset(user.get('last_login')));
}
if (user.get('updated_at')) {
user.set('updated_at', _private.addOffset(user.get('updated_at')));
}
user.set('created_at', _private.addOffset(user.get('created_at')));
return models.User.edit(user.toJSON(), _.merge({}, options, {id: user.get('id')}));
}).then(function () {
logger.info(messagePrefix + 'Updated datetime fields for Users');
});
});
},
function updateSubscribers() {
return models.Subscriber.findAll(options).then(function (result) {
if (result.models.length === 0) {
logger.warn(messagePrefix + 'No Subscribers found');
return;
}
return Promise.mapSeries(result.models, function mapper(subscriber) {
if (subscriber.get('unsubscribed_at')) {
subscriber.set('unsubscribed_at', _private.addOffset(subscriber.get('unsubscribed_at')));
}
if (subscriber.get('updated_at')) {
subscriber.set('updated_at', _private.addOffset(subscriber.get('updated_at')));
}
subscriber.set('created_at', _private.addOffset(subscriber.get('created_at')));
return models.Subscriber.edit(subscriber.toJSON(), _.merge({}, options, {id: subscriber.get('id')}));
}).then(function () {
logger.info(messagePrefix + 'Updated datetime fields for Subscribers');
});
});
},
function updateSettings() {
return models.Settings.findAll(options).then(function (result) {
if (result.models.length === 0) {
logger.warn(messagePrefix + 'No Settings found');
return;
}
return Promise.mapSeries(result.models, function mapper(settings) {
// migrations was new created, so it already is in UTC
if (settings.get('key') === 'migrations') {
return Promise.resolve();
}
if (settings.get('updated_at')) {
settings.set('updated_at', _private.addOffset(settings.get('updated_at')));
}
settings.set('created_at', _private.addOffset(settings.get('created_at')));
return models.Settings.edit(settings.toJSON(), _.merge({}, options, {id: settings.get('id')}));
}).then(function () {
logger.info(messagePrefix + 'Updated datetime fields for Settings');
});
});
},
function updateAllOtherModels() {
return Promise.mapSeries(['Role', 'Permission', 'Tag', 'App', 'AppSetting', 'AppField', 'Client'], function (model) {
return models[model].findAll(options).then(function (result) {
if (result.models.length === 0) {
logger.warn(messagePrefix + 'No {model} found'.replace('{model}', model));
return;
}
return Promise.mapSeries(result.models, function mapper(object) {
object.set('created_at', _private.addOffset(object.get('created_at')));
if (object.get('updated_at')) {
object.set('updated_at', _private.addOffset(object.get('updated_at')));
}
return models[model].edit(object.toJSON(), _.merge({}, options, {id: object.get('id')}));
}).then(function () {
logger.info(messagePrefix + 'Updated datetime fields for {model}'.replace('{model}', model));
});
});
});
},
function addMigrationSettingsEntry() {
settingsMigrations[settingsKey] = moment().format();
return models.Settings.edit({
key: 'migrations',
value: JSON.stringify(settingsMigrations)
}, options);
}]
).catch(function (err) {
if (err.message === 'skip') {
logger.warn(messagePrefix + 'Your databases uses UTC datetimes, skip!');
return Promise.resolve();
}
return Promise.reject(err);
});
});
};

View File

@ -0,0 +1,3 @@
module.exports = [
require('./01-transform-dates-into-utc')
];

View File

@ -1,7 +1,7 @@
{ {
"core": { "core": {
"databaseVersion": { "databaseVersion": {
"defaultValue": "005" "defaultValue": "006"
}, },
"dbHash": { "dbHash": {
"defaultValue": null "defaultValue": null
@ -11,6 +11,9 @@
}, },
"displayUpdateNotification": { "displayUpdateNotification": {
"defaultValue": null "defaultValue": null
},
"migrations": {
"defaultValue": "{}"
} }
}, },
"blog": { "blog": {

View File

@ -5,9 +5,8 @@ var schema = require('../schema').tables,
Promise = require('bluebird'), Promise = require('bluebird'),
errors = require('../../errors'), errors = require('../../errors'),
config = require('../../config'), config = require('../../config'),
readThemes = require('../../utils/read-themes'), readThemes = require('../../utils/read-themes'),
i18n = require('../../i18n'), i18n = require('../../i18n'),
toString = require('lodash.tostring'),
validateSchema, validateSchema,
validateSettings, validateSettings,
@ -54,7 +53,7 @@ validateSchema = function validateSchema(tableName, model) {
_.each(columns, function each(columnKey) { _.each(columns, function each(columnKey) {
var message = '', var message = '',
strVal = toString(model[columnKey]); strVal = _.toString(model[columnKey]);
// check nullable // check nullable
if (model.hasOwnProperty(columnKey) && schema[tableName][columnKey].hasOwnProperty('nullable') if (model.hasOwnProperty(columnKey) && schema[tableName][columnKey].hasOwnProperty('nullable')
@ -166,7 +165,7 @@ validateActiveTheme = function validateActiveTheme(themeName) {
// available validators: https://github.com/chriso/validator.js#validators // available validators: https://github.com/chriso/validator.js#validators
validate = function validate(value, key, validations) { validate = function validate(value, key, validations) {
var validationErrors = []; var validationErrors = [];
value = toString(value); value = _.toString(value);
_.each(validations, function each(validationOptions, validationName) { _.each(validations, function each(validationOptions, validationName) {
var goodResult = true; var goodResult = true;

View File

@ -101,16 +101,41 @@ ghostBookshelf.Model = ghostBookshelf.Model.extend({
this.set('updated_by', this.contextUser(options)); this.set('updated_by', this.contextUser(options));
}, },
// Base prototype properties will go here /**
// Fix problems with dates * before we insert dates into the database, we have to normalize
fixDates: function fixDates(attrs) { * date format is now in each db the same
*/
fixDatesWhenSave: function fixDates(attrs) {
var self = this; var self = this;
_.each(attrs, function each(value, key) { _.each(attrs, function each(value, key) {
if (value !== null if (value !== null
&& schema.tables[self.tableName].hasOwnProperty(key) && schema.tables[self.tableName].hasOwnProperty(key)
&& schema.tables[self.tableName][key].type === 'dateTime') { && schema.tables[self.tableName][key].type === 'dateTime') {
// convert dateTime value into a native javascript Date object attrs[key] = moment(value).format('YYYY-MM-DD HH:mm:ss');
}
});
return attrs;
},
/**
* all supported databases (pg, sqlite, mysql) return different values
*
* sqlite:
* - knex returns a UTC String
* pg:
* - has an active UTC session through knex and returns UTC Date
* mysql:
* - knex wraps the UTC value into a local JS Date
*/
fixDatesWhenFetch: function fixDates(attrs) {
var self = this;
_.each(attrs, function each(value, key) {
if (value !== null
&& schema.tables[self.tableName].hasOwnProperty(key)
&& schema.tables[self.tableName][key].type === 'dateTime') {
attrs[key] = moment(value).toDate(); attrs[key] = moment(value).toDate();
} }
}); });
@ -148,12 +173,12 @@ ghostBookshelf.Model = ghostBookshelf.Model.extend({
// format date before writing to DB, bools work // format date before writing to DB, bools work
format: function format(attrs) { format: function format(attrs) {
return this.fixDates(attrs); return this.fixDatesWhenSave(attrs);
}, },
// format data and bool when fetching from DB // format data and bool when fetching from DB
parse: function parse(attrs) { parse: function parse(attrs) {
return this.fixBools(this.fixDates(attrs)); return this.fixBools(this.fixDatesWhenFetch(attrs));
}, },
toJSON: function toJSON(options) { toJSON: function toJSON(options) {
@ -199,11 +224,14 @@ ghostBookshelf.Model = ghostBookshelf.Model.extend({
/** /**
* Returns an array of keys permitted in every method's `options` hash. * Returns an array of keys permitted in every method's `options` hash.
* Can be overridden and added to by a model's `permittedOptions` method. * Can be overridden and added to by a model's `permittedOptions` method.
*
* importing: is used when import a JSON file or when migrating the database
*
* @return {Object} Keys allowed in the `options` hash of every model's method. * @return {Object} Keys allowed in the `options` hash of every model's method.
*/ */
permittedOptions: function permittedOptions() { permittedOptions: function permittedOptions() {
// terms to whitelist for all methods. // terms to whitelist for all methods.
return ['context', 'include', 'transacting']; return ['context', 'include', 'transacting', 'importing'];
}, },
/** /**
@ -352,11 +380,18 @@ ghostBookshelf.Model = ghostBookshelf.Model.extend({
* @return {Promise(ghostBookshelf.Model)} Edited Model * @return {Promise(ghostBookshelf.Model)} Edited Model
*/ */
edit: function edit(data, options) { edit: function edit(data, options) {
var id = options.id; var id = options.id,
model = this.forge({id: id});
data = this.filterData(data); data = this.filterData(data);
options = this.filterOptions(options, 'edit'); options = this.filterOptions(options, 'edit');
return this.forge({id: id}).fetch(options).then(function then(object) { // We allow you to disable timestamps when run migration, so that the posts `updated_at` value is the same
if (options.importing) {
model.hasTimestamps = false;
}
return model.fetch(options).then(function then(object) {
if (object) { if (object) {
return object.save(data, options); return object.save(data, options);
} }
@ -374,6 +409,7 @@ ghostBookshelf.Model = ghostBookshelf.Model.extend({
data = this.filterData(data); data = this.filterData(data);
options = this.filterOptions(options, 'add'); options = this.filterOptions(options, 'add');
var model = this.forge(data); var model = this.forge(data);
// We allow you to disable timestamps when importing posts so that the new posts `updated_at` value is the same // We allow you to disable timestamps when importing posts so that the new posts `updated_at` value is the same
// as the import json blob. More details refer to https://github.com/TryGhost/Ghost/issues/1696 // as the import json blob. More details refer to https://github.com/TryGhost/Ghost/issues/1696
if (options.importing) { if (options.importing) {

View File

@ -12,7 +12,6 @@ var _ = require('lodash'),
config = require('../config'), config = require('../config'),
baseUtils = require('./base/utils'), baseUtils = require('./base/utils'),
i18n = require('../i18n'), i18n = require('../i18n'),
toString = require('lodash.tostring'),
Post, Post,
Posts; Posts;
@ -179,11 +178,11 @@ Post = ghostBookshelf.Model.extend({
ghostBookshelf.Model.prototype.saving.call(this, model, attr, options); ghostBookshelf.Model.prototype.saving.call(this, model, attr, options);
this.set('html', converter.makeHtml(toString(this.get('markdown')))); this.set('html', converter.makeHtml(_.toString(this.get('markdown'))));
// disabling sanitization until we can implement a better version // disabling sanitization until we can implement a better version
title = this.get('title') || i18n.t('errors.models.post.untitled'); title = this.get('title') || i18n.t('errors.models.post.untitled');
this.set('title', toString(title).trim()); this.set('title', _.toString(title).trim());
// ### Business logic for published_at and published_by // ### Business logic for published_at and published_by
// If the current status is 'published' and published_at is not set, set it to now // If the current status is 'published' and published_at is not set, set it to now
@ -462,8 +461,7 @@ Post = ghostBookshelf.Model.extend({
validOptions = { validOptions = {
findOne: ['columns', 'importing', 'withRelated', 'require'], findOne: ['columns', 'importing', 'withRelated', 'require'],
findPage: ['page', 'limit', 'columns', 'filter', 'order', 'status', 'staticPages'], findPage: ['page', 'limit', 'columns', 'filter', 'order', 'status', 'staticPages'],
findAll: ['columns', 'filter'], findAll: ['columns', 'filter']
add: ['importing']
}; };
if (validOptions[methodName]) { if (validOptions[methodName]) {

View File

@ -89,12 +89,17 @@ Settings = ghostBookshelf.Model.extend({
}); });
} }
}, { }, {
findOne: function (options) { findOne: function (data, options) {
// Allow for just passing the key instead of attributes if (_.isEmpty(data)) {
if (!_.isObject(options)) { options = data;
options = {key: options};
} }
return Promise.resolve(ghostBookshelf.Model.findOne.call(this, options));
// Allow for just passing the key instead of attributes
if (!_.isObject(data)) {
data = {key: data};
}
return Promise.resolve(ghostBookshelf.Model.findOne.call(this, data, options));
}, },
edit: function (data, options) { edit: function (data, options) {
@ -125,6 +130,11 @@ Settings = ghostBookshelf.Model.extend({
if (options.context.internal && item.hasOwnProperty('type')) { if (options.context.internal && item.hasOwnProperty('type')) {
saveData.type = item.type; saveData.type = item.type;
} }
// it's allowed to edit all attributes in case of importing/migrating
if (options.importing) {
saveData = item;
}
return setting.save(saveData, options); return setting.save(saveData, options);
} }

View File

@ -10,7 +10,6 @@ var _ = require('lodash'),
validation = require('../data/validation'), validation = require('../data/validation'),
events = require('../events'), events = require('../events'),
i18n = require('../i18n'), i18n = require('../i18n'),
toString = require('lodash.tostring'),
bcryptGenSalt = Promise.promisify(bcrypt.genSalt), bcryptGenSalt = Promise.promisify(bcrypt.genSalt),
bcryptHash = Promise.promisify(bcrypt.hash), bcryptHash = Promise.promisify(bcrypt.hash),
@ -369,7 +368,7 @@ User = ghostBookshelf.Model.extend({
userData = this.filterData(data), userData = this.filterData(data),
roles; roles;
userData.password = toString(userData.password); userData.password = _.toString(userData.password);
options = this.filterOptions(options, 'add'); options = this.filterOptions(options, 'add');
options.withRelated = _.union(options.withRelated, options.include); options.withRelated = _.union(options.withRelated, options.include);

18
core/server/overrides.js Normal file
View File

@ -0,0 +1,18 @@
var moment = require('moment-timezone'),
_ = require('lodash'),
toString = require('lodash.tostring');
/**
* the version of lodash included in Ghost (3.10.1) does not have _.toString - it is added in a later version.
*/
_.toString = toString;
/**
* force UTC
* - you can require moment or moment-timezone, both is configured to UTC
* - you are allowed to use new Date() to instantiate datetime values for models, because they are transformed into UTC in the model layer
* - be careful when not working with models, every value from the native JS Date is local TZ
* - be careful when you work with date operations, therefor always wrap a date into moment
*/
moment.tz.setDefault('UTC');

View File

@ -5,7 +5,7 @@ function StorageBase() {
} }
StorageBase.prototype.getTargetDir = function (baseDir) { StorageBase.prototype.getTargetDir = function (baseDir) {
var m = moment(new Date().getTime()), var m = moment(),
month = m.format('MM'), month = m.format('MM'),
year = m.format('YYYY'); year = m.format('YYYY');

View File

@ -364,6 +364,53 @@ describe('Post API', function () {
// ## Add // ## Add
describe('Add', function () { describe('Add', function () {
it('create and ensure dates are correct', function (done) {
var newPost = {posts: [{status: 'published', published_at: '2016-05-30T07:00:00.000Z'}]};
request.post(testUtils.API.getApiQuery('posts'))
.set('Authorization', 'Bearer ' + accesstoken)
.send(newPost)
.expect('Content-Type', /json/)
.expect('Cache-Control', testUtils.cacheRules.private)
.expect(201)
.end(function (err, res) {
if (err) {
return done(err);
}
res.body.posts[0].published_at.should.eql('2016-05-30T07:00:00.000Z');
res.body.posts[0].published_at = '2016-05-30T09:00:00.000Z';
request.put(testUtils.API.getApiQuery('posts/' + res.body.posts[0].id + '/'))
.set('Authorization', 'Bearer ' + accesstoken)
.send(res.body)
.expect('Content-Type', /json/)
.expect('Cache-Control', testUtils.cacheRules.private)
.expect(200)
.end(function (err, res) {
if (err) {
return done(err);
}
res.body.posts[0].published_at.should.eql('2016-05-30T09:00:00.000Z');
request.get(testUtils.API.getApiQuery('posts/' + res.body.posts[0].id + '/'))
.set('Authorization', 'Bearer ' + accesstoken)
.expect('Content-Type', /json/)
.expect('Cache-Control', testUtils.cacheRules.private)
.expect(200)
.end(function (err, res) {
if (err) {
return done(err);
}
res.body.posts[0].published_at.should.eql('2016-05-30T09:00:00.000Z');
done();
});
});
});
});
it('can create a new draft, publish post, update post', function (done) { it('can create a new draft, publish post, update post', function (done) {
var newTitle = 'My Post', var newTitle = 'My Post',
newTagName = 'My Tag', newTagName = 'My Tag',

View File

@ -3,6 +3,7 @@ var testUtils = require('../utils/index'),
should = require('should'), should = require('should'),
sinon = require('sinon'), sinon = require('sinon'),
Promise = require('bluebird'), Promise = require('bluebird'),
moment = require('moment'),
assert = require('assert'), assert = require('assert'),
_ = require('lodash'), _ = require('lodash'),
validator = require('validator'), validator = require('validator'),
@ -157,7 +158,7 @@ describe('Import', function () {
it('safely imports data, from 001', function (done) { it('safely imports data, from 001', function (done) {
var exportData, var exportData,
timestamp = 1349928000000; timestamp = moment().startOf('day').valueOf(); // no ms
testUtils.fixtures.loadExportFixture('export-001').then(function (exported) { testUtils.fixtures.loadExportFixture('export-001').then(function (exported) {
exportData = exported; exportData = exported;
@ -215,9 +216,9 @@ describe('Import', function () {
// in MySQL we're returned a date object. // in MySQL we're returned a date object.
// We pass the returned post always through the date object // We pass the returned post always through the date object
// to ensure the return is consistent for all DBs. // to ensure the return is consistent for all DBs.
assert.equal(new Date(posts[0].created_at).getTime(), timestamp); assert.equal(moment(posts[0].created_at).valueOf(), timestamp);
assert.equal(new Date(posts[0].updated_at).getTime(), timestamp); assert.equal(moment(posts[0].updated_at).valueOf(), timestamp);
assert.equal(new Date(posts[0].published_at).getTime(), timestamp); assert.equal(moment(posts[0].published_at).valueOf(), timestamp);
done(); done();
}).catch(done); }).catch(done);
@ -321,7 +322,7 @@ describe('Import', function () {
it('safely imports data from 002', function (done) { it('safely imports data from 002', function (done) {
var exportData, var exportData,
timestamp = 1349928000000; timestamp = moment().startOf('day').valueOf(); // no ms
testUtils.fixtures.loadExportFixture('export-002').then(function (exported) { testUtils.fixtures.loadExportFixture('export-002').then(function (exported) {
exportData = exported; exportData = exported;
@ -379,9 +380,9 @@ describe('Import', function () {
// in MySQL we're returned a date object. // in MySQL we're returned a date object.
// We pass the returned post always through the date object // We pass the returned post always through the date object
// to ensure the return is consistant for all DBs. // to ensure the return is consistant for all DBs.
assert.equal(new Date(posts[0].created_at).getTime(), timestamp); assert.equal(moment(posts[0].created_at).valueOf(), timestamp);
assert.equal(new Date(posts[0].updated_at).getTime(), timestamp); assert.equal(moment(posts[0].updated_at).valueOf(), timestamp);
assert.equal(new Date(posts[0].published_at).getTime(), timestamp); assert.equal(moment(posts[0].published_at).valueOf(), timestamp);
done(); done();
}).catch(done); }).catch(done);

View File

@ -1,6 +1,8 @@
/*global describe, it, beforeEach, afterEach */ /*global describe, it, beforeEach, afterEach, before */
var should = require('should'), var should = require('should'),
sinon = require('sinon'), sinon = require('sinon'),
_ = require('lodash'),
moment = require('moment'),
rewire = require('rewire'), rewire = require('rewire'),
Promise = require('bluebird'), Promise = require('bluebird'),
@ -14,6 +16,7 @@ var should = require('should'),
fixtureUtils = require('../../server/data/migration/fixtures/utils'), fixtureUtils = require('../../server/data/migration/fixtures/utils'),
fixtures004 = require('../../server/data/migration/fixtures/004'), fixtures004 = require('../../server/data/migration/fixtures/004'),
fixtures005 = require('../../server/data/migration/fixtures/005'), fixtures005 = require('../../server/data/migration/fixtures/005'),
fixtures006 = require('../../server/data/migration/fixtures/006'),
ensureDefaultSettings = require('../../server/data/migration/fixtures/settings'), ensureDefaultSettings = require('../../server/data/migration/fixtures/settings'),
sandbox = sinon.sandbox.create(); sandbox = sinon.sandbox.create();
@ -947,6 +950,199 @@ describe('Fixtures', function () {
}); });
}); });
}); });
describe('Update to 006', function () {
it('should call all the 006 fixture upgrades', function (done) {
// Setup
// Create a new stub, this will replace sequence, so that db calls don't actually get run
var sequenceStub = sandbox.stub(),
sequenceReset = update.__set__('sequence', sequenceStub);
// The first time we call sequence, it should be to execute a top level version, e.g 006
// yieldsTo('0') means this stub will execute the function at index 0 of the array passed as the
// first argument. In short the `runVersionTasks` function gets executed, and sequence gets called
// again with the array of tasks to execute for 006, which is what we want to check
sequenceStub.onFirstCall().yieldsTo('0').returns(Promise.resolve([]));
update(['006'], loggerStub).then(function (result) {
should.exist(result);
loggerStub.info.calledTwice.should.be.true();
loggerStub.warn.called.should.be.false();
sequenceStub.calledTwice.should.be.true();
sequenceStub.firstCall.calledWith(sinon.match.array, sinon.match.object, loggerStub).should.be.true();
sequenceStub.firstCall.args[0].should.be.an.Array().with.lengthOf(1);
sequenceStub.firstCall.args[0][0].should.be.a.Function().with.property('name', 'runVersionTasks');
sequenceStub.secondCall.calledWith(sinon.match.array, sinon.match.object, loggerStub).should.be.true();
sequenceStub.secondCall.args[0].should.be.an.Array().with.lengthOf(1);
sequenceStub.secondCall.args[0][0].should.be.a.Function().with.property('name', 'transformDatesIntoUTC');
// Reset
sequenceReset();
done();
}).catch(done);
});
describe('Tasks:', function () {
it('should have tasks for 006', function () {
should.exist(fixtures006);
fixtures006.should.be.an.Array().with.lengthOf(1);
});
describe('01-transform-dates-into-utc', function () {
var updateClient = fixtures006[0],
serverTimezoneOffset,
migrationsSettingsValue;
beforeEach(function () {
sandbox.stub(models.Base, 'transaction', function (stubDone) {
return new Promise(function (resolve) {
stubDone();
setTimeout(function () {
resolve();
}, 500);
});
});
configUtils.config.database.isPostgreSQL = function () {
return false;
};
sandbox.stub(Date.prototype, 'getTimezoneOffset', function () {
return serverTimezoneOffset;
});
sandbox.stub(models.Settings, 'findOne', function () {
return Promise.resolve({attributes: {value: migrationsSettingsValue}});
});
});
describe('error cases', function () {
before(function () {
serverTimezoneOffset = 0;
});
it('server offset is 0', function (done) {
migrationsSettingsValue = '{}';
updateClient({}, loggerStub)
.then(function () {
loggerStub.warn.called.should.be.true();
done();
})
.catch(done);
});
it('migration already ran', function (done) {
migrationsSettingsValue = '{ "006/01": "timestamp" }';
updateClient({}, loggerStub)
.then(function () {
loggerStub.warn.called.should.be.true();
done();
})
.catch(done);
});
});
describe('success cases', function () {
var newModels, createdAt, migrationsSettingsWasUpdated;
before(function () {
serverTimezoneOffset = -60;
migrationsSettingsValue = '{}';
});
beforeEach(function () {
newModels = {};
migrationsSettingsWasUpdated = false;
serverTimezoneOffset = -60;
migrationsSettingsValue = '{}';
sandbox.stub(models.Settings.prototype, 'fetch', function () {
// CASE: we update migrations settings entry
if (this.get('key') === 'migrations') {
migrationsSettingsWasUpdated = true;
return Promise.resolve(newModels[Object.keys(newModels)[0]]);
}
return Promise.resolve(newModels[Number(this.get('key'))]);
});
sandbox.stub(models.Base.Model.prototype, 'save', function (data) {
if (data.key !== 'migrations') {
should.exist(data.created_at);
}
return Promise.resolve({});
});
sandbox.stub(models.Base.Model, 'findAll', function () {
var model = models.Base.Model.forge();
model.set('id', Date.now());
model.set('created_at', createdAt);
model.set('key', model.id.toString());
newModels[model.id] = model;
return Promise.resolve({models: [model]});
});
sandbox.stub(models.Base.Model, 'findOne', function (data) {
return Promise.resolve(newModels[data.id]);
});
sandbox.stub(models.Base.Model, 'edit').returns(Promise.resolve({}));
});
it('sqlite: no UTC update, only format', function (done) {
createdAt = moment(1464798678537).toDate();
configUtils.config.database.client = 'sqlite3';
moment(createdAt).format('YYYY-MM-DD HH:mm:ss').should.eql('2016-06-01 16:31:18');
updateClient({}, loggerStub)
.then(function () {
_.each(newModels, function (model) {
moment(model.get('created_at')).format('YYYY-MM-DD HH:mm:ss').should.eql('2016-06-01 16:31:18');
});
migrationsSettingsWasUpdated.should.eql(true);
done();
})
.catch(done);
});
it('mysql: UTC update', function (done) {
/**
* we fetch 2016-06-01 06:00:00 from the database which was stored as local representation
* our base model will wrap it into a UTC moment
* the offset is 1 hour
* we expect 2016-06-01 05:00:00
*/
createdAt = moment('2016-06-01 06:00:00').toDate();
configUtils.config.database.client = 'mysql';
moment(createdAt).format('YYYY-MM-DD HH:mm:ss').should.eql('2016-06-01 06:00:00');
updateClient({}, loggerStub)
.then(function () {
_.each(newModels, function (model) {
moment(model.get('created_at')).format('YYYY-MM-DD HH:mm:ss').should.eql('2016-06-01 05:00:00');
});
migrationsSettingsWasUpdated.should.eql(true);
done();
})
.catch(done);
});
});
});
});
});
}); });
describe('Populate fixtures', function () { describe('Populate fixtures', function () {

View File

@ -31,7 +31,7 @@ var should = require('should'),
// both of which are required for migrations to work properly. // both of which are required for migrations to work properly.
describe('DB version integrity', function () { describe('DB version integrity', function () {
// Only these variables should need updating // Only these variables should need updating
var currentDbVersion = '005', var currentDbVersion = '006',
currentSchemaHash = 'f63f41ac97b5665a30c899409bbf9a83', currentSchemaHash = 'f63f41ac97b5665a30c899409bbf9a83',
currentFixturesHash = '56f781fa3bba0fdbf98da5f232ec9b11'; currentFixturesHash = '56f781fa3bba0fdbf98da5f232ec9b11';

View File

@ -1,10 +1,13 @@
// # Ghost Startup // # Ghost Startup
// Orchestrates the startup of Ghost when run from command line. // Orchestrates the startup of Ghost when run from command line.
var express, var express,
ghost, ghost,
parentApp, parentApp,
errors; errors;
require('./core/server/overrides');
// Make sure dependencies are installed and file system permissions are correct. // Make sure dependencies are installed and file system permissions are correct.
require('./core/server/utils/startup-check').check(); require('./core/server/utils/startup-check').check();

View File

@ -51,6 +51,7 @@
"lodash": "3.10.1", "lodash": "3.10.1",
"lodash.tostring": "4.1.3", "lodash.tostring": "4.1.3",
"moment": "2.13.0", "moment": "2.13.0",
"moment-timezone": "0.5.4",
"morgan": "1.7.0", "morgan": "1.7.0",
"multer": "1.1.0", "multer": "1.1.0",
"netjet": "1.1.1", "netjet": "1.1.1",