Revert "Force UTC at process level"

This commit is contained in:
Hannah Wolfe 2016-06-02 14:38:02 +01:00
parent 3fcf8632a8
commit 78e693f469
19 changed files with 40 additions and 583 deletions

View File

@ -176,8 +176,7 @@ var _ = require('lodash'),
ui: 'bdd',
reporter: grunt.option('reporter') || 'spec',
timeout: '15000',
save: grunt.option('reporter-output'),
require: ['core/server/overrides']
save: grunt.option('reporter-output')
},
// #### All Unit tests

View File

@ -1,5 +1,5 @@
var knex = require('knex'),
config = require('../../config'),
var knex = require('knex'),
config = require('../../config'),
dbConfig = config.database,
knexInstance;
@ -7,11 +7,7 @@ function configure(dbConfig) {
var client = dbConfig.client,
pg;
dbConfig.isPostgreSQL = function () {
return client === 'pg' || client === 'postgres' || client === 'postgresql';
};
if (dbConfig.isPostgreSQL()) {
if (client === 'pg' || client === 'postgres' || client === 'postgresql') {
try {
pg = require('pg');
} catch (e) {
@ -24,26 +20,12 @@ function configure(dbConfig) {
pg.types.setTypeParser(20, function (val) {
return val === null ? null : parseInt(val, 10);
});
// https://github.com/tgriesser/knex/issues/97
// this sets the timezone to UTC only for the connection!
dbConfig.pool = {
afterCreate: function (connection, callback) {
connection.query('set timezone=\'UTC\'', function (err) {
callback(err, connection);
});
}
};
}
if (client === 'sqlite3') {
dbConfig.useNullAsDefault = dbConfig.useNullAsDefault || false;
}
if (client === 'mysql') {
dbConfig.connection.timezone = 'UTC';
}
return dbConfig;
}

View File

@ -1 +0,0 @@
module.exports = [];

View File

@ -1,209 +0,0 @@
var config = require('../../../../config'),
models = require(config.paths.corePath + '/server/models'),
sequence = require(config.paths.corePath + '/server/utils/sequence'),
moment = require('moment'),
_ = require('lodash'),
Promise = require('bluebird'),
messagePrefix = 'Transforming dates to UTC: ',
settingsKey = '006/01',
_private = {};
// Module-private date helpers for the UTC migration below.
_private.getTZOffset = function getTZOffset(date) {
// Minutes between this date's local representation and UTC, per the JS runtime.
return date.getTimezoneOffset();
};
_private.getTZOffsetMax = function getTZOffsetMax() {
// Worst-case absolute offset across a summer and a winter sample date,
// so DST differences are covered when deciding whether any shift is needed.
return Math.max(Math.abs(new Date('2015-07-01').getTimezoneOffset()), Math.abs(new Date('2015-01-01').getTimezoneOffset()));
};
_private.addOffset = function addOffset(date) {
// noOffset is set for sqlite (values are already UTC) — only normalize via moment.
if (_private.noOffset) {
return moment(date).toDate();
}
// Shift the stored local-time value by its own offset to recover the UTC instant.
return moment(date).add(_private.getTZOffset(date), 'minutes').toDate();
};
/**
* postgres: stores dates with offset, so it's enough to force timezone UTC in the db connection (see data/db/connection.js)
* sqlite: stores UTC timestamps, but we will normalize the format to YYYY-MM-DD HH:mm:ss
*/
/**
 * Migration task 006/01: rewrite stored datetime columns as UTC.
 * Skips entirely (via a rejected Error('skip'), caught below) when the server
 * already runs at UTC offset 0, when the DB is postgres (handled at the
 * connection level), or when this migration has already been recorded in the
 * 'migrations' settings entry.
 *
 * @param {Object} options - model options; gains transacting/importing/context here
 * @param {Object} logger - logger exposing info/warn
 * @returns {Promise} resolves when the migration finishes or is skipped
 */
module.exports = function transformDatesIntoUTC(options, logger) {
var ServerTimezoneOffset = _private.getTZOffsetMax(),
settingsMigrations = null;
// Run every step inside one transaction so a failure leaves no half-migrated dates.
return models.Base.transaction(function (transaction) {
options.transacting = transaction;
// will ensure updated_at fields will not be updated, we take them from the original models
options.importing = true;
options.context = {internal: true};
return sequence([
function databaseCheck() {
// CASE: server runs in UTC — stored values need no shift.
if (ServerTimezoneOffset === 0) {
return Promise.reject(new Error('skip'));
}
// CASE: postgres — UTC is forced on the connection instead (see header note).
if (config.database.isPostgreSQL()) {
return Promise.reject(new Error('skip'));
}
// sqlite already stores UTC timestamps, so only the format is normalized
// (addOffset will not add any offset when noOffset is true).
if (config.database.client === 'sqlite3') {
_private.noOffset = true;
} else {
_private.noOffset = false;
}
logger.info(messagePrefix + '(could take a while)...');
return Promise.resolve();
},
function checkIfMigrationAlreadyRan() {
return models.Settings.findOne({key: 'migrations'}, options)
.then(function (result) {
try {
settingsMigrations = JSON.parse(result.attributes.value) || {};
} catch (err) {
return Promise.reject(err);
}
// CASE: migration ran already
if (settingsMigrations.hasOwnProperty(settingsKey)) {
return Promise.reject(new Error('skip'));
}
return Promise.resolve();
});
},
function updatePosts() {
return models.Post.findAll(options).then(function (result) {
if (result.models.length === 0) {
logger.warn(messagePrefix + 'No Posts found');
return;
}
// mapSeries: update one model at a time to keep DB load predictable.
return Promise.mapSeries(result.models, function mapper(post) {
// nullable datetime fields are only shifted when present
if (post.get('published_at')) {
post.set('published_at', _private.addOffset(post.get('published_at')));
}
if (post.get('updated_at')) {
post.set('updated_at', _private.addOffset(post.get('updated_at')));
}
// created_at is set unconditionally (no null guard, unlike the fields above)
post.set('created_at', _private.addOffset(post.get('created_at')));
return models.Post.edit(post.toJSON(), _.merge({}, options, {id: post.get('id')}));
}).then(function () {
logger.info(messagePrefix + 'Updated datetime fields for Posts');
});
});
},
function updateUsers() {
return models.User.findAll(options).then(function (result) {
if (result.models.length === 0) {
logger.warn(messagePrefix + 'No Users found');
return;
}
return Promise.mapSeries(result.models, function mapper(user) {
if (user.get('last_login')) {
user.set('last_login', _private.addOffset(user.get('last_login')));
}
if (user.get('updated_at')) {
user.set('updated_at', _private.addOffset(user.get('updated_at')));
}
user.set('created_at', _private.addOffset(user.get('created_at')));
return models.User.edit(user.toJSON(), _.merge({}, options, {id: user.get('id')}));
}).then(function () {
logger.info(messagePrefix + 'Updated datetime fields for Users');
});
});
},
function updateSubscribers() {
return models.Subscriber.findAll(options).then(function (result) {
if (result.models.length === 0) {
logger.warn(messagePrefix + 'No Subscribers found');
return;
}
return Promise.mapSeries(result.models, function mapper(subscriber) {
if (subscriber.get('unsubscribed_at')) {
subscriber.set('unsubscribed_at', _private.addOffset(subscriber.get('unsubscribed_at')));
}
if (subscriber.get('updated_at')) {
subscriber.set('updated_at', _private.addOffset(subscriber.get('updated_at')));
}
subscriber.set('created_at', _private.addOffset(subscriber.get('created_at')));
return models.Subscriber.edit(subscriber.toJSON(), _.merge({}, options, {id: subscriber.get('id')}));
}).then(function () {
logger.info(messagePrefix + 'Updated datetime fields for Subscribers');
});
});
},
function updateSettings() {
return models.Settings.findAll(options).then(function (result) {
if (result.models.length === 0) {
logger.warn(messagePrefix + 'No Settings found');
return;
}
return Promise.mapSeries(result.models, function mapper(settings) {
// the migrations entry was newly created, so it is already in UTC
if (settings.get('key') === 'migrations') {
return Promise.resolve();
}
if (settings.get('updated_at')) {
settings.set('updated_at', _private.addOffset(settings.get('updated_at')));
}
settings.set('created_at', _private.addOffset(settings.get('created_at')));
return models.Settings.edit(settings.toJSON(), _.merge({}, options, {id: settings.get('id')}));
}).then(function () {
logger.info(messagePrefix + 'Updated datetime fields for Settings');
});
});
},
function updateAllOtherModels() {
// Remaining models share the same created_at/updated_at shape, so they are
// handled generically by name.
return Promise.mapSeries(['Role', 'Permission', 'Tag', 'App', 'AppSetting', 'AppField', 'Client'], function (model) {
return models[model].findAll(options).then(function (result) {
if (result.models.length === 0) {
logger.warn(messagePrefix + 'No {model} found'.replace('{model}', model));
return;
}
return Promise.mapSeries(result.models, function mapper(object) {
object.set('created_at', _private.addOffset(object.get('created_at')));
if (object.get('updated_at')) {
object.set('updated_at', _private.addOffset(object.get('updated_at')));
}
return models[model].edit(object.toJSON(), _.merge({}, options, {id: object.get('id')}));
}).then(function () {
logger.info(messagePrefix + 'Updated datetime fields for {model}'.replace('{model}', model));
});
});
});
},
function addMigrationSettingsEntry() {
// Record this migration's key with a timestamp so re-runs are skipped above.
settingsMigrations[settingsKey] = moment().format();
return models.Settings.edit({
key: 'migrations',
value: JSON.stringify(settingsMigrations)
}, options);
}]
).catch(function (err) {
// 'skip' is the sentinel used by the checks above — warn, but resolve cleanly.
if (err.message === 'skip') {
logger.warn(messagePrefix + 'Your databases uses UTC datetimes, skip!');
return Promise.resolve();
}
return Promise.reject(err);
});
});
};

View File

@ -1,3 +0,0 @@
module.exports = [
require('./01-transform-dates-into-utc')
];

View File

@ -1,7 +1,7 @@
{
"core": {
"databaseVersion": {
"defaultValue": "006"
"defaultValue": "005"
},
"dbHash": {
"defaultValue": null
@ -11,9 +11,6 @@
},
"displayUpdateNotification": {
"defaultValue": null
},
"migrations": {
"defaultValue": "{}"
}
},
"blog": {

View File

@ -5,8 +5,9 @@ var schema = require('../schema').tables,
Promise = require('bluebird'),
errors = require('../../errors'),
config = require('../../config'),
readThemes = require('../../utils/read-themes'),
readThemes = require('../../utils/read-themes'),
i18n = require('../../i18n'),
toString = require('lodash.tostring'),
validateSchema,
validateSettings,
@ -53,7 +54,7 @@ validateSchema = function validateSchema(tableName, model) {
_.each(columns, function each(columnKey) {
var message = '',
strVal = _.toString(model[columnKey]);
strVal = toString(model[columnKey]);
// check nullable
if (model.hasOwnProperty(columnKey) && schema[tableName][columnKey].hasOwnProperty('nullable')
@ -165,7 +166,7 @@ validateActiveTheme = function validateActiveTheme(themeName) {
// available validators: https://github.com/chriso/validator.js#validators
validate = function validate(value, key, validations) {
var validationErrors = [];
value = _.toString(value);
value = toString(value);
_.each(validations, function each(validationOptions, validationName) {
var goodResult = true;

View File

@ -101,41 +101,16 @@ ghostBookshelf.Model = ghostBookshelf.Model.extend({
this.set('updated_by', this.contextUser(options));
},
/**
* before we insert dates into the database, we have to normalize
* date format is now in each db the same
*/
fixDatesWhenSave: function fixDates(attrs) {
// Base prototype properties will go here
// Fix problems with dates
fixDates: function fixDates(attrs) {
var self = this;
_.each(attrs, function each(value, key) {
if (value !== null
&& schema.tables[self.tableName].hasOwnProperty(key)
&& schema.tables[self.tableName][key].type === 'dateTime') {
attrs[key] = moment(value).format('YYYY-MM-DD HH:mm:ss');
}
});
return attrs;
},
/**
* all supported databases (pg, sqlite, mysql) return different values
*
* sqlite:
* - knex returns a UTC String
* pg:
* - has an active UTC session through knex and returns UTC Date
* mysql:
* - knex wraps the UTC value into a local JS Date
*/
fixDatesWhenFetch: function fixDates(attrs) {
var self = this;
_.each(attrs, function each(value, key) {
if (value !== null
&& schema.tables[self.tableName].hasOwnProperty(key)
&& schema.tables[self.tableName][key].type === 'dateTime') {
// convert dateTime value into a native javascript Date object
attrs[key] = moment(value).toDate();
}
});
@ -173,12 +148,12 @@ ghostBookshelf.Model = ghostBookshelf.Model.extend({
// format date before writing to DB, bools work
format: function format(attrs) {
return this.fixDatesWhenSave(attrs);
return this.fixDates(attrs);
},
// format data and bool when fetching from DB
parse: function parse(attrs) {
return this.fixBools(this.fixDatesWhenFetch(attrs));
return this.fixBools(this.fixDates(attrs));
},
toJSON: function toJSON(options) {
@ -224,14 +199,11 @@ ghostBookshelf.Model = ghostBookshelf.Model.extend({
/**
* Returns an array of keys permitted in every method's `options` hash.
* Can be overridden and added to by a model's `permittedOptions` method.
*
* importing: is used when import a JSON file or when migrating the database
*
* @return {Object} Keys allowed in the `options` hash of every model's method.
*/
permittedOptions: function permittedOptions() {
// terms to whitelist for all methods.
return ['context', 'include', 'transacting', 'importing'];
return ['context', 'include', 'transacting'];
},
/**
@ -380,18 +352,11 @@ ghostBookshelf.Model = ghostBookshelf.Model.extend({
* @return {Promise(ghostBookshelf.Model)} Edited Model
*/
edit: function edit(data, options) {
var id = options.id,
model = this.forge({id: id});
var id = options.id;
data = this.filterData(data);
options = this.filterOptions(options, 'edit');
// We allow you to disable timestamps when run migration, so that the posts `updated_at` value is the same
if (options.importing) {
model.hasTimestamps = false;
}
return model.fetch(options).then(function then(object) {
return this.forge({id: id}).fetch(options).then(function then(object) {
if (object) {
return object.save(data, options);
}
@ -409,7 +374,6 @@ ghostBookshelf.Model = ghostBookshelf.Model.extend({
data = this.filterData(data);
options = this.filterOptions(options, 'add');
var model = this.forge(data);
// We allow you to disable timestamps when importing posts so that the new posts `updated_at` value is the same
// as the import json blob. More details refer to https://github.com/TryGhost/Ghost/issues/1696
if (options.importing) {

View File

@ -12,6 +12,7 @@ var _ = require('lodash'),
config = require('../config'),
baseUtils = require('./base/utils'),
i18n = require('../i18n'),
toString = require('lodash.tostring'),
Post,
Posts;
@ -178,11 +179,11 @@ Post = ghostBookshelf.Model.extend({
ghostBookshelf.Model.prototype.saving.call(this, model, attr, options);
this.set('html', converter.makeHtml(_.toString(this.get('markdown'))));
this.set('html', converter.makeHtml(toString(this.get('markdown'))));
// disabling sanitization until we can implement a better version
title = this.get('title') || i18n.t('errors.models.post.untitled');
this.set('title', _.toString(title).trim());
this.set('title', toString(title).trim());
// ### Business logic for published_at and published_by
// If the current status is 'published' and published_at is not set, set it to now
@ -461,7 +462,8 @@ Post = ghostBookshelf.Model.extend({
validOptions = {
findOne: ['columns', 'importing', 'withRelated', 'require'],
findPage: ['page', 'limit', 'columns', 'filter', 'order', 'status', 'staticPages'],
findAll: ['columns', 'filter']
findAll: ['columns', 'filter'],
add: ['importing']
};
if (validOptions[methodName]) {

View File

@ -89,17 +89,12 @@ Settings = ghostBookshelf.Model.extend({
});
}
}, {
findOne: function (data, options) {
if (_.isEmpty(data)) {
options = data;
}
findOne: function (options) {
// Allow for just passing the key instead of attributes
if (!_.isObject(data)) {
data = {key: data};
if (!_.isObject(options)) {
options = {key: options};
}
return Promise.resolve(ghostBookshelf.Model.findOne.call(this, data, options));
return Promise.resolve(ghostBookshelf.Model.findOne.call(this, options));
},
edit: function (data, options) {
@ -130,11 +125,6 @@ Settings = ghostBookshelf.Model.extend({
if (options.context.internal && item.hasOwnProperty('type')) {
saveData.type = item.type;
}
// it's allowed to edit all attributes in case of importing/migrating
if (options.importing) {
saveData = item;
}
return setting.save(saveData, options);
}

View File

@ -10,6 +10,7 @@ var _ = require('lodash'),
validation = require('../data/validation'),
events = require('../events'),
i18n = require('../i18n'),
toString = require('lodash.tostring'),
bcryptGenSalt = Promise.promisify(bcrypt.genSalt),
bcryptHash = Promise.promisify(bcrypt.hash),
@ -368,7 +369,7 @@ User = ghostBookshelf.Model.extend({
userData = this.filterData(data),
roles;
userData.password = _.toString(userData.password);
userData.password = toString(userData.password);
options = this.filterOptions(options, 'add');
options.withRelated = _.union(options.withRelated, options.include);

View File

@ -1,18 +0,0 @@
// Process-level overrides, required once at startup before anything else
// touches lodash or moment.
var moment = require('moment-timezone'),
_ = require('lodash'),
toString = require('lodash.tostring');
/**
 * the version of lodash included in Ghost (3.10.1) does not have _.toString - it is added in a later version.
 */
_.toString = toString;
/**
 * force UTC
 * - you can require moment or moment-timezone, both is configured to UTC
 * - you are allowed to use new Date() to instantiate datetime values for models, because they are transformed into UTC in the model layer
 * - be careful when not working with models, every value from the native JS Date is local TZ
 * - be careful when you work with date operations, therefore always wrap a date into moment
 */
moment.tz.setDefault('UTC');

View File

@ -5,7 +5,7 @@ function StorageBase() {
}
StorageBase.prototype.getTargetDir = function (baseDir) {
var m = moment(),
var m = moment(new Date().getTime()),
month = m.format('MM'),
year = m.format('YYYY');

View File

@ -364,53 +364,6 @@ describe('Post API', function () {
// ## Add
describe('Add', function () {
it('create and ensure dates are correct', function (done) {
var newPost = {posts: [{status: 'published', published_at: '2016-05-30T07:00:00.000Z'}]};
request.post(testUtils.API.getApiQuery('posts'))
.set('Authorization', 'Bearer ' + accesstoken)
.send(newPost)
.expect('Content-Type', /json/)
.expect('Cache-Control', testUtils.cacheRules.private)
.expect(201)
.end(function (err, res) {
if (err) {
return done(err);
}
res.body.posts[0].published_at.should.eql('2016-05-30T07:00:00.000Z');
res.body.posts[0].published_at = '2016-05-30T09:00:00.000Z';
request.put(testUtils.API.getApiQuery('posts/' + res.body.posts[0].id + '/'))
.set('Authorization', 'Bearer ' + accesstoken)
.send(res.body)
.expect('Content-Type', /json/)
.expect('Cache-Control', testUtils.cacheRules.private)
.expect(200)
.end(function (err, res) {
if (err) {
return done(err);
}
res.body.posts[0].published_at.should.eql('2016-05-30T09:00:00.000Z');
request.get(testUtils.API.getApiQuery('posts/' + res.body.posts[0].id + '/'))
.set('Authorization', 'Bearer ' + accesstoken)
.expect('Content-Type', /json/)
.expect('Cache-Control', testUtils.cacheRules.private)
.expect(200)
.end(function (err, res) {
if (err) {
return done(err);
}
res.body.posts[0].published_at.should.eql('2016-05-30T09:00:00.000Z');
done();
});
});
});
});
it('can create a new draft, publish post, update post', function (done) {
var newTitle = 'My Post',
newTagName = 'My Tag',

View File

@ -3,7 +3,6 @@ var testUtils = require('../utils/index'),
should = require('should'),
sinon = require('sinon'),
Promise = require('bluebird'),
moment = require('moment'),
assert = require('assert'),
_ = require('lodash'),
validator = require('validator'),
@ -158,7 +157,7 @@ describe('Import', function () {
it('safely imports data, from 001', function (done) {
var exportData,
timestamp = moment().startOf('day').valueOf(); // no ms
timestamp = 1349928000000;
testUtils.fixtures.loadExportFixture('export-001').then(function (exported) {
exportData = exported;
@ -216,9 +215,9 @@ describe('Import', function () {
// in MySQL we're returned a date object.
// We pass the returned post always through the date object
// to ensure the return is consistent for all DBs.
assert.equal(moment(posts[0].created_at).valueOf(), timestamp);
assert.equal(moment(posts[0].updated_at).valueOf(), timestamp);
assert.equal(moment(posts[0].published_at).valueOf(), timestamp);
assert.equal(new Date(posts[0].created_at).getTime(), timestamp);
assert.equal(new Date(posts[0].updated_at).getTime(), timestamp);
assert.equal(new Date(posts[0].published_at).getTime(), timestamp);
done();
}).catch(done);
@ -322,7 +321,7 @@ describe('Import', function () {
it('safely imports data from 002', function (done) {
var exportData,
timestamp = moment().startOf('day').valueOf(); // no ms
timestamp = 1349928000000;
testUtils.fixtures.loadExportFixture('export-002').then(function (exported) {
exportData = exported;
@ -380,9 +379,9 @@ describe('Import', function () {
// in MySQL we're returned a date object.
// We pass the returned post always through the date object
// to ensure the return is consistent for all DBs.
assert.equal(moment(posts[0].created_at).valueOf(), timestamp);
assert.equal(moment(posts[0].updated_at).valueOf(), timestamp);
assert.equal(moment(posts[0].published_at).valueOf(), timestamp);
assert.equal(new Date(posts[0].created_at).getTime(), timestamp);
assert.equal(new Date(posts[0].updated_at).getTime(), timestamp);
assert.equal(new Date(posts[0].published_at).getTime(), timestamp);
done();
}).catch(done);

View File

@ -1,8 +1,6 @@
/*global describe, it, beforeEach, afterEach, before */
/*global describe, it, beforeEach, afterEach */
var should = require('should'),
sinon = require('sinon'),
_ = require('lodash'),
moment = require('moment'),
rewire = require('rewire'),
Promise = require('bluebird'),
@ -16,7 +14,6 @@ var should = require('should'),
fixtureUtils = require('../../server/data/migration/fixtures/utils'),
fixtures004 = require('../../server/data/migration/fixtures/004'),
fixtures005 = require('../../server/data/migration/fixtures/005'),
fixtures006 = require('../../server/data/migration/fixtures/006'),
ensureDefaultSettings = require('../../server/data/migration/fixtures/settings'),
sandbox = sinon.sandbox.create();
@ -950,199 +947,6 @@ describe('Fixtures', function () {
});
});
});
describe('Update to 006', function () {
it('should call all the 006 fixture upgrades', function (done) {
// Setup
// Create a new stub, this will replace sequence, so that db calls don't actually get run
var sequenceStub = sandbox.stub(),
sequenceReset = update.__set__('sequence', sequenceStub);
// The first time we call sequence, it should be to execute a top level version, e.g 006
// yieldsTo('0') means this stub will execute the function at index 0 of the array passed as the
// first argument. In short the `runVersionTasks` function gets executed, and sequence gets called
// again with the array of tasks to execute for 006, which is what we want to check
sequenceStub.onFirstCall().yieldsTo('0').returns(Promise.resolve([]));
update(['006'], loggerStub).then(function (result) {
should.exist(result);
loggerStub.info.calledTwice.should.be.true();
loggerStub.warn.called.should.be.false();
sequenceStub.calledTwice.should.be.true();
sequenceStub.firstCall.calledWith(sinon.match.array, sinon.match.object, loggerStub).should.be.true();
sequenceStub.firstCall.args[0].should.be.an.Array().with.lengthOf(1);
sequenceStub.firstCall.args[0][0].should.be.a.Function().with.property('name', 'runVersionTasks');
sequenceStub.secondCall.calledWith(sinon.match.array, sinon.match.object, loggerStub).should.be.true();
sequenceStub.secondCall.args[0].should.be.an.Array().with.lengthOf(1);
sequenceStub.secondCall.args[0][0].should.be.a.Function().with.property('name', 'transformDatesIntoUTC');
// Reset
sequenceReset();
done();
}).catch(done);
});
describe('Tasks:', function () {
it('should have tasks for 006', function () {
should.exist(fixtures006);
fixtures006.should.be.an.Array().with.lengthOf(1);
});
describe('01-transform-dates-into-utc', function () {
var updateClient = fixtures006[0],
serverTimezoneOffset,
migrationsSettingsValue;
beforeEach(function () {
sandbox.stub(models.Base, 'transaction', function (stubDone) {
return new Promise(function (resolve) {
stubDone();
setTimeout(function () {
resolve();
}, 500);
});
});
configUtils.config.database.isPostgreSQL = function () {
return false;
};
sandbox.stub(Date.prototype, 'getTimezoneOffset', function () {
return serverTimezoneOffset;
});
sandbox.stub(models.Settings, 'findOne', function () {
return Promise.resolve({attributes: {value: migrationsSettingsValue}});
});
});
describe('error cases', function () {
before(function () {
serverTimezoneOffset = 0;
});
it('server offset is 0', function (done) {
migrationsSettingsValue = '{}';
updateClient({}, loggerStub)
.then(function () {
loggerStub.warn.called.should.be.true();
done();
})
.catch(done);
});
it('migration already ran', function (done) {
migrationsSettingsValue = '{ "006/01": "timestamp" }';
updateClient({}, loggerStub)
.then(function () {
loggerStub.warn.called.should.be.true();
done();
})
.catch(done);
});
});
describe('success cases', function () {
var newModels, createdAt, migrationsSettingsWasUpdated;
before(function () {
serverTimezoneOffset = -60;
migrationsSettingsValue = '{}';
});
beforeEach(function () {
newModels = {};
migrationsSettingsWasUpdated = false;
serverTimezoneOffset = -60;
migrationsSettingsValue = '{}';
sandbox.stub(models.Settings.prototype, 'fetch', function () {
// CASE: we update migrations settings entry
if (this.get('key') === 'migrations') {
migrationsSettingsWasUpdated = true;
return Promise.resolve(newModels[Object.keys(newModels)[0]]);
}
return Promise.resolve(newModels[Number(this.get('key'))]);
});
sandbox.stub(models.Base.Model.prototype, 'save', function (data) {
if (data.key !== 'migrations') {
should.exist(data.created_at);
}
return Promise.resolve({});
});
sandbox.stub(models.Base.Model, 'findAll', function () {
var model = models.Base.Model.forge();
model.set('id', Date.now());
model.set('created_at', createdAt);
model.set('key', model.id.toString());
newModels[model.id] = model;
return Promise.resolve({models: [model]});
});
sandbox.stub(models.Base.Model, 'findOne', function (data) {
return Promise.resolve(newModels[data.id]);
});
sandbox.stub(models.Base.Model, 'edit').returns(Promise.resolve({}));
});
it('sqlite: no UTC update, only format', function (done) {
createdAt = moment(1464798678537).toDate();
configUtils.config.database.client = 'sqlite3';
moment(createdAt).format('YYYY-MM-DD HH:mm:ss').should.eql('2016-06-01 16:31:18');
updateClient({}, loggerStub)
.then(function () {
_.each(newModels, function (model) {
moment(model.get('created_at')).format('YYYY-MM-DD HH:mm:ss').should.eql('2016-06-01 16:31:18');
});
migrationsSettingsWasUpdated.should.eql(true);
done();
})
.catch(done);
});
it('mysql: UTC update', function (done) {
/**
* we fetch 2016-06-01 06:00:00 from the database which was stored as local representation
* our base model will wrap it into a UTC moment
* the offset is 1 hour
* we expect 2016-06-01 05:00:00
*/
createdAt = moment('2016-06-01 06:00:00').toDate();
configUtils.config.database.client = 'mysql';
moment(createdAt).format('YYYY-MM-DD HH:mm:ss').should.eql('2016-06-01 06:00:00');
updateClient({}, loggerStub)
.then(function () {
_.each(newModels, function (model) {
moment(model.get('created_at')).format('YYYY-MM-DD HH:mm:ss').should.eql('2016-06-01 05:00:00');
});
migrationsSettingsWasUpdated.should.eql(true);
done();
})
.catch(done);
});
});
});
});
});
});
describe('Populate fixtures', function () {

View File

@ -31,7 +31,7 @@ var should = require('should'),
// both of which are required for migrations to work properly.
describe('DB version integrity', function () {
// Only these variables should need updating
var currentDbVersion = '006',
var currentDbVersion = '005',
currentSchemaHash = 'f63f41ac97b5665a30c899409bbf9a83',
currentFixturesHash = '56f781fa3bba0fdbf98da5f232ec9b11';

View File

@ -1,13 +1,10 @@
// # Ghost Startup
// Orchestrates the startup of Ghost when run from command line.
var express,
ghost,
parentApp,
errors;
require('./core/server/overrides');
// Make sure dependencies are installed and file system permissions are correct.
require('./core/server/utils/startup-check').check();

View File

@ -51,7 +51,6 @@
"lodash": "3.10.1",
"lodash.tostring": "4.1.3",
"moment": "2.13.0",
"moment-timezone": "0.5.4",
"morgan": "1.7.0",
"multer": "1.1.0",
"netjet": "1.1.1",