var should = require('should'),
    sinon = require('sinon'),
    testUtils = require('../utils/index'),
    Promise = require('bluebird'),
    moment = require('moment'),
    assert = require('assert'),
    _ = require('lodash'),
    validator = require('validator'),

    // Stuff we are testing
    db = require('../../server/data/db'),
    exporter = require('../../server/data/export'),
    importer = require('../../server/data/import'),
    DataImporter = require('../../server/data/import/data-importer'),

    knex = db.knex,
    sandbox = sinon.sandbox.create();
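
// Note: stubs in this file are created via the shared sinon sandbox above;
// the afterEach hook below calls sandbox.restore(), so stubs cannot leak between tests.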

// Tests in here do an import for each test
describe('Import', function () {
    before(testUtils.teardown);
    afterEach(testUtils.teardown);
    afterEach(function () {
        sandbox.restore();
    });

    should.exist(exporter);
    should.exist(importer);

    describe('Resolves', function () {
        beforeEach(testUtils.setup());

        it('resolves DataImporter', function (done) {
            var importStub = sandbox.stub(DataImporter, 'importData', function () {
                    return Promise.resolve();
                }),
                fakeData = {test: true};

            importer.doImport(fakeData).then(function () {
                importStub.calledWith(fakeData).should.equal(true);

                importStub.restore();

                done();
            }).catch(done);
        });
    });
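
    // The describes below call testUtils.setup('roles', 'owner', 'settings') first; presumably this
    // seeds the fixture roles, the owner user and the default settings (see ../utils) so imported
    // content has an existing user to be attributed to (an assumption based on the fixture names).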

    describe('Sanitizes', function () {
        beforeEach(testUtils.setup('roles', 'owner', 'settings'));

        it('import results have data and problems', function (done) {
            var exportData;

            testUtils.fixtures.loadExportFixture('export-003').then(function (exported) {
                exportData = exported;
                return importer.doImport(exportData);
            }).then(function (importResult) {
                should.exist(importResult);
                should.exist(importResult.data);
                should.exist(importResult.problems);

                done();
            }).catch(done);
        });

        it('removes duplicate posts', function (done) {
            var exportData;

            testUtils.fixtures.loadExportFixture('export-003-duplicate-posts').then(function (exported) {
                exportData = exported;
                return importer.doImport(exportData);
            }).then(function (importResult) {
                should.exist(importResult.data.data.posts);

                importResult.data.data.posts.length.should.equal(1);

                importResult.problems.posts.length.should.equal(1);

                done();
            }).catch(done);
        });

        it('removes duplicate tags and updates associations', function (done) {
            var exportData;

            testUtils.fixtures.loadExportFixture('export-003-duplicate-tags').then(function (exported) {
                exportData = exported;
                return importer.doImport(exportData);
            }).then(function (importResult) {
                should.exist(importResult.data.data.tags);
                should.exist(importResult.data.data.posts_tags);

                importResult.data.data.tags.length.should.equal(1);

                // Check we imported all posts_tags associations
                importResult.data.data.posts_tags.length.should.equal(2);
                // Check the post_tag.tag_id was updated when we removed the duplicate tag
                _.every(importResult.data.data.posts_tags, function (postTag) {
                    return postTag.tag_id !== 2;
                }).should.equal(true, 'no posts_tags should still reference the removed duplicate tag');

                importResult.problems.tags.length.should.equal(1);

                done();
            }).catch(done);
        });
    });
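
    // Sketch of the importer result shape the assertions above rely on (inferred from this describe,
    // not an authoritative API description):
    //   importResult = {
    //       data: {data: {posts: [...], tags: [...], posts_tags: [...], /* ... */}}, // sanitized records that get imported
    //       problems: {posts: [...], tags: [...], /* ... */}                         // records skipped, e.g. duplicates
    //   };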

    describe('DataImporter', function () {
        beforeEach(testUtils.setup('roles', 'owner', 'settings'));

        should.exist(DataImporter);

        it('imports data from 000', function (done) {
            var exportData;

            testUtils.fixtures.loadExportFixture('export-000').then(function (exported) {
                exportData = exported;

                return importer.doImport(exportData);
            }).then(function () {
                // Grab the data from tables
                return Promise.all([
                    knex('users').select(),
                    knex('posts').select(),
                    knex('settings').select(),
                    knex('tags').select(),
                    knex('subscribers').select()
                ]);
            }).then(function (importedData) {
                should.exist(importedData);

                importedData.length.should.equal(5, 'Did not get data successfully');

                var users = importedData[0],
                    posts = importedData[1],
                    settings = importedData[2],
                    tags = importedData[3],
                    subscribers = importedData[4];

                subscribers.length.should.equal(2, 'There should be two subscribers');

                // we always have 1 user, the owner user we added
                users.length.should.equal(1, 'There should only be one user');

                // import no longer requires all data to be dropped, and adds posts
                posts.length.should.equal(exportData.data.posts.length, 'Wrong number of posts');
                posts[0].status.should.eql('published');
                posts[1].status.should.eql('scheduled');

                // test settings
                settings.length.should.be.above(0, 'Wrong number of settings');

                // test tags
                tags.length.should.equal(exportData.data.tags.length, 'no new tags');

                done();
            }).catch(done);
        });

        it('safely imports data, from 001', function (done) {
            var exportData,
                timestamp = moment().startOf('day').valueOf(); // no ms

            testUtils.fixtures.loadExportFixture('export-001').then(function (exported) {
                exportData = exported;

                // Modify timestamp data for testing
                exportData.data.posts[0].created_at = timestamp;
                exportData.data.posts[0].updated_at = timestamp;
                exportData.data.posts[0].published_at = timestamp;

                return importer.doImport(exportData);
            }).then(function () {
                // Grab the data from tables
                return Promise.all([
                    knex('users').select(),
                    knex('posts').select(),
                    knex('settings').select(),
                    knex('tags').select()
                ]);
            }).then(function (importedData) {
                should.exist(importedData);

                importedData.length.should.equal(4, 'Did not get data successfully');

                var users = importedData[0],
                    posts = importedData[1],
                    settings = importedData[2],
                    tags = importedData[3];

                // we always have 1 user, the default user we added
                users.length.should.equal(1, 'There should only be one user');

                // user should still have the credentials from the original insert, not the import
                users[0].email.should.equal(testUtils.DataGenerator.Content.users[0].email);
                users[0].password.should.equal(testUtils.DataGenerator.Content.users[0].password);
                // but the name and slug should have been overridden, and bio should not be imported
                users[0].name.should.equal(exportData.data.users[0].name);
                users[0].slug.should.equal(exportData.data.users[0].slug);
                should.not.exist(users[0].bio, 'bio is not imported');

                // import no longer requires all data to be dropped, and adds posts
                posts.length.should.equal(exportData.data.posts.length, 'Wrong number of posts');

                // active_theme should NOT have been overridden
                _.find(settings, {key: 'active_theme'}).value.should.equal('casper', 'Wrong theme');

                // test tags
                tags.length.should.equal(exportData.data.tags.length, 'no new tags');

                // Ensure the imported post retains the timestamps we set above.
                // sqlite returns a unix timestamp number and MySQL returns a date object,
                // so we always pass the returned value through moment
                // to make the comparison consistent for all DBs.
                assert.equal(moment(posts[0].created_at).valueOf(), timestamp);
                assert.equal(moment(posts[0].updated_at).valueOf(), timestamp);
                assert.equal(moment(posts[0].published_at).valueOf(), timestamp);

                done();
            }).catch(done);
        });

        it('doesn\'t import invalid settings data from 001', function (done) {
            var exportData;

            testUtils.fixtures.loadExportFixture('export-001').then(function (exported) {
                exportData = exported;
                // change to blank settings key
                exportData.data.settings[3].key = null;
                return importer.doImport(exportData);
            }).then(function () {
                (1).should.eql(0, 'Data import should not resolve promise.');
            }, function (error) {
                error[0].message.should.eql('Value in [settings.key] cannot be blank.');
                error[0].errorType.should.eql('ValidationError');

                Promise.all([
                    knex('users').select(),
                    knex('posts').select(),
                    knex('tags').select()
                ]).then(function (importedData) {
                    should.exist(importedData);

                    importedData.length.should.equal(3, 'Did not get data successfully');

                    var users = importedData[0],
                        posts = importedData[1],
                        tags = importedData[2];

                    // we always have 1 user, the default user we added
                    users.length.should.equal(1, 'There should only be one user');

                    // Nothing should have been imported
                    posts.length.should.equal(0, 'Wrong number of posts');
                    tags.length.should.equal(0, 'no new tags');

                    done();
                });
            }).catch(done);
        });
    });

    describe('002', function () {
        beforeEach(testUtils.setup('roles', 'owner', 'settings'));

        it('safely imports data from 002', function (done) {
            var exportData,
                timestamp = moment().startOf('day').valueOf(); // no ms

            testUtils.fixtures.loadExportFixture('export-002').then(function (exported) {
                exportData = exported;

                // Modify timestamp data for testing
                exportData.data.posts[0].created_at = timestamp;
                exportData.data.posts[0].updated_at = timestamp;
                exportData.data.posts[0].published_at = timestamp;

                return importer.doImport(exportData);
            }).then(function () {
                // Grab the data from tables
                return Promise.all([
                    knex('users').select(),
                    knex('posts').select(),
                    knex('settings').select(),
                    knex('tags').select()
                ]);
            }).then(function (importedData) {
                should.exist(importedData);

                importedData.length.should.equal(4, 'Did not get data successfully');

                var users = importedData[0],
                    posts = importedData[1],
                    settings = importedData[2],
                    tags = importedData[3];

                // we always have 1 user, the owner user we added
                users.length.should.equal(1, 'There should only be one user');

                // user should still have the credentials from the original insert, not the import
                users[0].email.should.equal(testUtils.DataGenerator.Content.users[0].email);
                users[0].password.should.equal(testUtils.DataGenerator.Content.users[0].password);
                // but the name and slug should have been overridden, and bio should not be imported
                users[0].name.should.equal(exportData.data.users[0].name);
                users[0].slug.should.equal(exportData.data.users[0].slug);
                should.not.exist(users[0].bio, 'bio is not imported');

                // import no longer requires all data to be dropped, and adds posts
                posts.length.should.equal(exportData.data.posts.length, 'Wrong number of posts');

                // active_theme should NOT have been overridden
                _.find(settings, {key: 'active_theme'}).value.should.equal('casper', 'Wrong theme');

                // test tags
                tags.length.should.equal(exportData.data.tags.length, 'no new tags');

                // Ensure the imported post retains the timestamps we set above.
                // sqlite returns a unix timestamp number and MySQL returns a date object,
                // so we always pass the returned value through moment
                // to make the comparison consistent for all DBs.
                assert.equal(moment(posts[0].created_at).valueOf(), timestamp);
                assert.equal(moment(posts[0].updated_at).valueOf(), timestamp);
                assert.equal(moment(posts[0].published_at).valueOf(), timestamp);

                done();
            }).catch(done);
        });

        it('doesn\'t import invalid settings data from 002', function (done) {
            var exportData;

            testUtils.fixtures.loadExportFixture('export-002').then(function (exported) {
                exportData = exported;
                // change to blank settings key
                exportData.data.settings[3].key = null;
                return importer.doImport(exportData);
            }).then(function () {
                (1).should.eql(0, 'Data import should not resolve promise.');
            }, function (error) {
                error[0].message.should.eql('Value in [settings.key] cannot be blank.');
                error[0].errorType.should.eql('ValidationError');

                Promise.all([
                    knex('users').select(),
                    knex('posts').select(),
                    knex('tags').select()
                ]).then(function (importedData) {
                    should.exist(importedData);

                    importedData.length.should.equal(3, 'Did not get data successfully');

                    var users = importedData[0],
                        posts = importedData[1],
                        tags = importedData[2];

                    // we always have 1 user, the owner user we added
                    users.length.should.equal(1, 'There should only be one user');

                    // Nothing should have been imported
                    posts.length.should.equal(0, 'Wrong number of posts');
                    tags.length.should.equal(0, 'no new tags');

                    done();
                });
            }).catch(done);
        });
    });

    describe('003', function () {
        beforeEach(testUtils.setup('roles', 'owner', 'settings'));

        it('safely imports data from 003 (single user)', function (done) {
            var exportData;

            testUtils.fixtures.loadExportFixture('export-003').then(function (exported) {
                exportData = exported;
                return importer.doImport(exportData);
            }).then(function () {
                // Grab the data from tables
                return Promise.all([
                    knex('users').select(),
                    knex('posts').select(),
                    knex('tags').select()
                ]);
            }).then(function (importedData) {
                should.exist(importedData);

                importedData.length.should.equal(3, 'Did not get data successfully');

                var users = importedData[0],
                    posts = importedData[1],
                    tags = importedData[2];

                // user should still have the credentials from the original insert, not the import
                users[0].email.should.equal(testUtils.DataGenerator.Content.users[0].email);
                users[0].password.should.equal(testUtils.DataGenerator.Content.users[0].password);
                // but the name and slug should have been overridden, and bio should not be imported
                users[0].name.should.equal(exportData.data.users[0].name);
                users[0].slug.should.equal(exportData.data.users[0].slug);
                should.not.exist(users[0].bio, 'bio is not imported');

                // test posts
                posts.length.should.equal(1, 'Wrong number of posts');
                // test tags
                tags.length.should.equal(1, 'no new tags');

                done();
            }).catch(done);
        });

        it('handles validation errors nicely', function (done) {
            var exportData;

            testUtils.fixtures.loadExportFixture('export-003-badValidation').then(function (exported) {
                exportData = exported;
                return importer.doImport(exportData);
            }).then(function () {
                done(new Error('Allowed import of duplicate data'));
            }).catch(function (response) {
                response.length.should.equal(5);
                response[0].errorType.should.equal('ValidationError');
                response[0].message.should.eql('Value in [posts.title] cannot be blank.');
                response[1].errorType.should.equal('ValidationError');
                response[1].message.should.eql('Value in [posts.slug] cannot be blank.');
                response[2].errorType.should.equal('ValidationError');
                response[2].message.should.eql('Value in [settings.key] cannot be blank.');
                response[3].errorType.should.equal('ValidationError');
                response[3].message.should.eql('Value in [tags.slug] cannot be blank.');
                response[4].errorType.should.equal('ValidationError');
                response[4].message.should.eql('Value in [tags.name] cannot be blank.');
                done();
            }).catch(done);
        });

        it('handles database errors nicely', function (done) {
            var exportData;

            testUtils.fixtures.loadExportFixture('export-003-dbErrors').then(function (exported) {
                exportData = exported;
                return importer.doImport(exportData);
            }).then(function () {
                done(new Error('Allowed import of duplicate data'));
            }).catch(function (response) {
                response.length.should.be.above(0);
                response[0].errorType.should.equal('DataImportError');
                done();
            }).catch(done);
        });

        it('doesn\'t import posts with an invalid author', function (done) {
            var exportData;

            testUtils.fixtures.loadExportFixture('export-003-mu-unknownAuthor').then(function (exported) {
                exportData = exported;

                return importer.doImport(exportData);
            }).then(function () {
                done(new Error('Allowed import of unknown author'));
            }).catch(function (response) {
                response.length.should.equal(1);
                response[0].message.should.eql('Attempting to import data linked to unknown user id 2');
                response[0].errorType.should.equal('DataImportError');

                done();
            }).catch(done);
        });

        it('doesn\'t import invalid tags data from 003', function (done) {
            var exportData;

            testUtils.fixtures.loadExportFixture('export-003-nullTags').then(function (exported) {
                exportData = exported;

                exportData.data.tags.length.should.be.above(1);
                exportData.data.posts_tags.length.should.be.above(1);

                return importer.doImport(exportData);
            }).then(function () {
                done(new Error('Allowed import of invalid tags data'));
            }).catch(function (response) {
                response.length.should.equal(2);
                response[0].errorType.should.equal('ValidationError');
                response[0].message.should.eql('Value in [tags.name] cannot be blank.');
                response[1].errorType.should.equal('ValidationError');
                response[1].message.should.eql('Value in [tags.slug] cannot be blank.');
                done();
            }).catch(done);
        });

        it('doesn\'t import invalid posts data from 003', function (done) {
            var exportData;

            testUtils.fixtures.loadExportFixture('export-003-nullPosts').then(function (exported) {
                exportData = exported;

                exportData.data.posts.length.should.be.above(1);

                return importer.doImport(exportData);
            }).then(function () {
                done(new Error('Allowed import of invalid post data'));
            }).catch(function (response) {
                response.length.should.equal(5, response);
                done();
            }).catch(done);
        });

        it('correctly sanitizes incorrect UUIDs', function (done) {
            var exportData;

            testUtils.fixtures.loadExportFixture('export-003-wrongUUID').then(function (exported) {
                exportData = exported;

                exportData.data.posts.length.should.be.above(0);

                return importer.doImport(exportData);
            }).then(function () {
                // Grab the data from tables
                return knex('posts').select();
            }).then(function (importedData) {
                should.exist(importedData);

                assert.equal(validator.isUUID(importedData[0].uuid), true, 'Old Ghost UUID NOT fixed');
                assert.equal(validator.isUUID(importedData[1].uuid), true, 'Empty UUID NOT fixed');
                assert.equal(validator.isUUID(importedData[2].uuid), true, 'Missing UUID NOT fixed');
                assert.equal(validator.isUUID(importedData[3].uuid), true, 'Malformed UUID NOT fixed');
                done();
            }).catch(done);
        });
    });

    describe('Validation', function () {
        beforeEach(testUtils.setup('roles', 'owner', 'settings'));

        it('doesn\'t import a title which is too long', function (done) {
            var exportData;

            testUtils.fixtures.loadExportFixture('export-001').then(function (exported) {
                exportData = exported;

                // change title to 2001 characters
                exportData.data.posts[0].title = new Array(2002).join('a');
                exportData.data.posts[0].tags = 'Tag';
                return importer.doImport(exportData);
            }).then(function () {
                (1).should.eql(0, 'Data import should not resolve promise.');
            }, function (error) {
                error[0].message.should.eql('Value in [posts.title] exceeds maximum length of 2000 characters.');
                error[0].errorType.should.eql('ValidationError');

                Promise.all([
                    knex('users').select(),
                    knex('posts').select(),
                    knex('tags').select()
                ]).then(function (importedData) {
                    should.exist(importedData);

                    importedData.length.should.equal(3, 'Did not get data successfully');

                    var users = importedData[0],
                        posts = importedData[1],
                        tags = importedData[2];

                    // we always have 1 user, the default user we added
                    users.length.should.equal(1, 'There should only be one user');

                    // Nothing should have been imported
                    posts.length.should.equal(0, 'Wrong number of posts');
                    tags.length.should.equal(0, 'no new tags');

                    done();
                });
            }).catch(done);
        });
    });
});

// Tests in here do an import-per-describe, and then have several tests to check various bits of data
describe('Import (new test structure)', function () {
    before(testUtils.teardown);

    describe('imports multi user data onto blank ghost install', function () {
        var exportData;

        before(function doImport(done) {
            testUtils.initFixtures('roles', 'owner', 'settings').then(function () {
                return testUtils.fixtures.loadExportFixture('export-003-mu');
            }).then(function (exported) {
                exportData = exported;
                return importer.doImport(exportData);
            }).then(function () {
                done();
            }).catch(done);
        });
        after(testUtils.teardown);

        it('gets the right data', function (done) {
            var fetchImported = Promise.join(
                knex('posts').select(),
                knex('settings').select(),
                knex('tags').select()
            );

            fetchImported.then(function (importedData) {
                var posts,
                    settings,
                    tags,
                    post1,
                    post2,
                    post3;

                // General data checks
                should.exist(importedData);
                importedData.length.should.equal(3, 'Did not get data successfully');

                // Test posts, settings and tags
                posts = importedData[0];
                settings = importedData[1];
                tags = importedData[2];

                post1 = _.find(posts, function (post) {
                    return post.slug === exportData.data.posts[0].slug;
                });
                post2 = _.find(posts, function (post) {
                    return post.slug === exportData.data.posts[1].slug;
                });
                post3 = _.find(posts, function (post) {
                    return post.slug === exportData.data.posts[2].slug;
                });

                // test posts
                posts.length.should.equal(3, 'Wrong number of posts');
                post1.title.should.equal(exportData.data.posts[0].title);
                post2.title.should.equal(exportData.data.posts[1].title);
                post3.title.should.equal(exportData.data.posts[2].title);

                // test tags
                tags.length.should.equal(3, 'should be 3 tags');

                done();
            }).catch(done);
        });

        it('imports users with correct roles and status', function (done) {
            var fetchImported = Promise.join(
                knex('users').select(),
                knex('roles_users').select()
            );

            fetchImported.then(function (importedData) {
                var user1,
                    user2,
                    user3,
                    users,
                    rolesUsers;

                // General data checks
                should.exist(importedData);
                importedData.length.should.equal(2, 'Did not get data successfully');

                // Test the users and roles
                users = importedData[0];
                rolesUsers = importedData[1];

                // we imported 3 users
                // the original user should be untouched
                // the two new users should have been created
                users.length.should.equal(3, 'There should only be three users');

                // the owner user is first
                user1 = users[0];
                // the other two users should have the imported data, but they get inserted in different orders
                user2 = _.find(users, function (user) {
                    return user.name === exportData.data.users[1].name;
                });
                user3 = _.find(users, function (user) {
                    return user.name === exportData.data.users[2].name;
                });

                user1.email.should.equal(testUtils.DataGenerator.Content.users[0].email);
                user1.password.should.equal(testUtils.DataGenerator.Content.users[0].password);
                user1.status.should.equal('active');
                user2.email.should.equal(exportData.data.users[1].email);
                user3.email.should.equal(exportData.data.users[2].email);

                // Newly created users should have a status of locked
                user2.status.should.equal('locked');
                user3.status.should.equal('locked');

                // Newly created users should have created_at/_by and updated_at/_by set to when they were imported
                user2.created_by.should.equal(user1.id);
                user2.created_at.should.not.equal(exportData.data.users[1].created_at);
                user2.updated_by.should.equal(user1.id);
                user2.updated_at.should.not.equal(exportData.data.users[1].updated_at);
                user3.created_by.should.equal(user1.id);
                user3.created_at.should.not.equal(exportData.data.users[2].created_at);
                user3.updated_by.should.equal(user1.id);
                user3.updated_at.should.not.equal(exportData.data.users[2].updated_at);

                rolesUsers.length.should.equal(3, 'There should be 3 role relations');

                _.each(rolesUsers, function (roleUser) {
                    if (roleUser.user_id === user1.id) {
                        roleUser.role_id.should.equal(testUtils.DataGenerator.Content.roles[3].id, 'Original user should be an owner');
                    }
                    if (roleUser.user_id === user2.id) {
                        roleUser.role_id.should.equal(testUtils.DataGenerator.Content.roles[0].id, 'Josephine should be an admin');
                    }
                    if (roleUser.user_id === user3.id) {
                        roleUser.role_id.should.equal(testUtils.DataGenerator.Content.roles[2].id, 'Smith should be an author by default');
                    }
                });

                done();
            }).catch(done);
        });

        it('imports posts & tags with correct authors, owners etc', function (done) {
            var fetchImported = Promise.join(
                knex('users').select(),
                knex('posts').select(),
                knex('tags').select()
            );

            fetchImported.then(function (importedData) {
                var users, user1, user2, user3,
                    posts, post1, post2, post3,
                    tags, tag1, tag2, tag3;

                // General data checks
                should.exist(importedData);
                importedData.length.should.equal(3, 'Did not get data successfully');

                // Test the users and roles
                users = importedData[0];
                posts = importedData[1];
                tags = importedData[2];

                // Grab the users
                // the owner user is first
                user1 = users[0];
                // the other two users should have the imported data, but they get inserted in different orders
                user2 = _.find(users, function (user) {
                    return user.name === exportData.data.users[1].name;
                });
                user3 = _.find(users, function (user) {
                    return user.name === exportData.data.users[2].name;
                });
                post1 = _.find(posts, function (post) {
                    return post.slug === exportData.data.posts[0].slug;
                });
                post2 = _.find(posts, function (post) {
                    return post.slug === exportData.data.posts[1].slug;
                });
                post3 = _.find(posts, function (post) {
                    return post.slug === exportData.data.posts[2].slug;
                });
                tag1 = _.find(tags, function (tag) {
                    return tag.slug === exportData.data.tags[0].slug;
                });
                tag2 = _.find(tags, function (tag) {
                    return tag.slug === exportData.data.tags[1].slug;
                });
                tag3 = _.find(tags, function (tag) {
                    return tag.slug === exportData.data.tags[2].slug;
                });

                // Check the authors are correct
                post1.author_id.should.equal(user2.id);
                post2.author_id.should.equal(user3.id);
                post3.author_id.should.equal(user1.id);

                // Created by should be what was in the import file
                post1.created_by.should.equal(user1.id);
                post2.created_by.should.equal(user3.id);
                post3.created_by.should.equal(user1.id);

                // Updated by gets set to the current user
                post1.updated_by.should.equal(user1.id);
                post2.updated_by.should.equal(user1.id);
                post3.updated_by.should.equal(user1.id);

                // Published by should be what was in the import file
                post1.published_by.should.equal(user2.id);
                post2.published_by.should.equal(user3.id);
                post3.published_by.should.equal(user1.id);

                // Created by should be what was in the import file
                tag1.created_by.should.equal(user1.id);
                tag2.created_by.should.equal(user2.id);
                tag3.created_by.should.equal(user3.id);

                // Updated by gets set to the current user
                tag1.updated_by.should.equal(user1.id);
                tag2.updated_by.should.equal(user1.id);
                tag3.updated_by.should.equal(user1.id);

                done();
            }).catch(done);
        });
    });

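    // Seeds a blank install via testUtils.initFixtures('roles', 'owner', 'settings') and then imports
    // the export-003-mu-noOwner fixture, which (per its name) is a multi-user export without an owner user.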
    describe('imports multi user data with no owner onto blank ghost install', function () {
        var exportData;

        before(function doImport(done) {
            testUtils.initFixtures('roles', 'owner', 'settings').then(function () {
                return testUtils.fixtures.loadExportFixture('export-003-mu-noOwner');
            }).then(function (exported) {
                exportData = exported;
                return importer.doImport(exportData);
            }).then(function () {
                done();
            }).catch(done);
        });

        after(testUtils.teardown);

        it('gets the right data', function (done) {
            var fetchImported = Promise.join(
                knex('posts').select(),
                knex('settings').select(),
                knex('tags').select()
            );

            fetchImported.then(function (importedData) {
                var posts,
                    settings,
                    tags,
                    post1,
                    post2,
                    post3;

                // General data checks
                should.exist(importedData);
                importedData.length.should.equal(3, 'Did not get data successfully');

                // Test posts, settings and tags
                posts = importedData[0];
                settings = importedData[1];
                tags = importedData[2];

                post1 = _.find(posts, function (post) {
                    return post.slug === exportData.data.posts[0].slug;
                });
                post2 = _.find(posts, function (post) {
                    return post.slug === exportData.data.posts[1].slug;
                });
                post3 = _.find(posts, function (post) {
                    return post.slug === exportData.data.posts[2].slug;
                });

                // test posts
                posts.length.should.equal(3, 'Wrong number of posts');
                post1.title.should.equal(exportData.data.posts[0].title);
                post2.title.should.equal(exportData.data.posts[1].title);
                post3.title.should.equal(exportData.data.posts[2].title);

                // test tags
                tags.length.should.equal(3, 'should be 3 tags');

                done();
            }).catch(done);
        });

        it('imports users with correct roles and status', function (done) {
            var fetchImported = Promise.join(
                knex('users').select(),
                knex('roles_users').select()
            );

            fetchImported.then(function (importedData) {
                var user1,
                    user2,
                    user3,
                    users,
                    rolesUsers;

                // General data checks
                should.exist(importedData);
                importedData.length.should.equal(2, 'Did not get data successfully');

                // Test the users and roles
                users = importedData[0];
                rolesUsers = importedData[1];

                // we imported 3 users
                // the original user should be untouched
                // the two new users should have been created
                users.length.should.equal(3, 'There should only be three users');

                // the owner user is first
                user1 = users[0];
                // the other two users should have the imported data, but they get inserted in different orders
                user2 = _.find(users, function (user) {
                    return user.name === exportData.data.users[0].name;
                });
                user3 = _.find(users, function (user) {
                    return user.name === exportData.data.users[1].name;
                });

                user1.email.should.equal(testUtils.DataGenerator.Content.users[0].email);
                user1.password.should.equal(testUtils.DataGenerator.Content.users[0].password);
                user1.status.should.equal('active');
                user2.email.should.equal(exportData.data.users[0].email);
                user3.email.should.equal(exportData.data.users[1].email);

                // Newly created users should have a status of locked
                user2.status.should.equal('locked');
                user3.status.should.equal('locked');

                // Newly created users should have created_at/_by and updated_at/_by set to when they were imported
                user2.created_by.should.equal(user1.id);
                user2.created_at.should.not.equal(exportData.data.users[0].created_at);
                user2.updated_by.should.equal(user1.id);
                user2.updated_at.should.not.equal(exportData.data.users[0].updated_at);
                user3.created_by.should.equal(user1.id);
                user3.created_at.should.not.equal(exportData.data.users[1].created_at);
                user3.updated_by.should.equal(user1.id);
                user3.updated_at.should.not.equal(exportData.data.users[1].updated_at);

                rolesUsers.length.should.equal(3, 'There should be 3 role relations');

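                // The role indexes below refer to entries in testUtils.DataGenerator.Content.roles;
                // going by the assertion messages, [0] appears to be Administrator, [2] Author and [3] Owner.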
                _.each(rolesUsers, function (roleUser) {
                    if (roleUser.user_id === user1.id) {
                        roleUser.role_id.should.equal(testUtils.DataGenerator.Content.roles[3].id, 'Original user should be an owner');
                    }

                    if (roleUser.user_id === user2.id) {
                        roleUser.role_id.should.equal(testUtils.DataGenerator.Content.roles[0].id, 'Josephine should be an admin');
                    }

                    if (roleUser.user_id === user3.id) {
                        roleUser.role_id.should.equal(testUtils.DataGenerator.Content.roles[2].id, 'Smith should be an author by default');
                    }
                });

                done();
            }).catch(done);
        });

        it('imports posts & tags with correct authors, owners etc', function (done) {
            var fetchImported = Promise.join(
                knex('users').select(),
                knex('posts').select(),
                knex('tags').select()
            );

            fetchImported.then(function (importedData) {
                var users, user1, user2, user3,
                    posts, post1, post2, post3,
                    tags, tag1, tag2, tag3;

                // General data checks
                should.exist(importedData);
                importedData.length.should.equal(3, 'Did not get data successfully');

                // Test the users and roles
                users = importedData[0];
                posts = importedData[1];
                tags = importedData[2];

                // Grab the users
                // the owner user is first
                user1 = users[0];
                // the other two users should have the imported data, but they get inserted in different orders
                user2 = _.find(users, function (user) {
                    return user.name === exportData.data.users[0].name;
                });
                user3 = _.find(users, function (user) {
                    return user.name === exportData.data.users[1].name;
                });
                post1 = _.find(posts, function (post) {
                    return post.slug === exportData.data.posts[0].slug;
                });
                post2 = _.find(posts, function (post) {
                    return post.slug === exportData.data.posts[1].slug;
                });
                post3 = _.find(posts, function (post) {
                    return post.slug === exportData.data.posts[2].slug;
                });
                tag1 = _.find(tags, function (tag) {
                    return tag.slug === exportData.data.tags[0].slug;
                });
                tag2 = _.find(tags, function (tag) {
                    return tag.slug === exportData.data.tags[1].slug;
                });
                tag3 = _.find(tags, function (tag) {
                    return tag.slug === exportData.data.tags[2].slug;
                });

                // Check the authors are correct
                post1.author_id.should.equal(user2.id);
                post2.author_id.should.equal(user3.id);
                post3.author_id.should.equal(user1.id);

                // Created by should be what was in the import file
                post1.created_by.should.equal(user1.id);
                post2.created_by.should.equal(user3.id);
                post3.created_by.should.equal(user1.id);

                // Updated by gets set to the current user
                post1.updated_by.should.equal(user1.id);
                post2.updated_by.should.equal(user1.id);
                post3.updated_by.should.equal(user1.id);

                // Published by should be what was in the import file
                post1.published_by.should.equal(user2.id);
                post2.published_by.should.equal(user3.id);
                post3.published_by.should.equal(user1.id);

                // Created by should be what was in the import file
                tag1.created_by.should.equal(user1.id);
                tag2.created_by.should.equal(user2.id);
                tag3.created_by.should.equal(user3.id);

                // Updated by gets set to the current user
                tag1.updated_by.should.equal(user1.id);
                tag2.updated_by.should.equal(user1.id);
                tag3.updated_by.should.equal(user1.id);

                done();
            }).catch(done);
        });
    });

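    // Unlike the suites above, this one starts from an already populated blog
    // (initFixtures('users:roles', 'posts', 'settings')) before importing export-003-mu,
    // so the imported records have to be merged with the existing data.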
    describe('imports multi user data onto existing data', function () {
        var exportData;

        before(function doImport(done) {
            // initialise the blog with some data
            testUtils.initFixtures('users:roles', 'posts', 'settings').then(function () {
                return testUtils.fixtures.loadExportFixture('export-003-mu');
            }).then(function (exported) {
                exportData = exported;
                return importer.doImport(exportData);
            }).then(function () {
                done();
            }).catch(done);
        });

        after(testUtils.teardown);

        it('gets the right data', function (done) {
            var fetchImported = Promise.join(
                knex('posts').select(),
                knex('settings').select(),
                knex('tags').select()
            );

            fetchImported.then(function (importedData) {
                var posts,
                    settings,
                    tags,
                    post1,
                    post2,
                    post3;

                // General data checks
                should.exist(importedData);
                importedData.length.should.equal(3, 'Did not get data successfully');

                // Test posts, settings and tags
                posts = importedData[0];
                settings = importedData[1];
                tags = importedData[2];

                post1 = _.find(posts, function (post) {
                    return post.slug === exportData.data.posts[0].slug;
                });
                post2 = _.find(posts, function (post) {
                    return post.slug === exportData.data.posts[1].slug;
                });
                post3 = _.find(posts, function (post) {
                    return post.slug === exportData.data.posts[2].slug;
                });

                // test posts
                posts.length.should.equal(
                    (exportData.data.posts.length + testUtils.DataGenerator.Content.posts.length),
                    'Wrong number of posts'
                );

                posts[0].title.should.equal(testUtils.DataGenerator.Content.posts[0].title);

                post1.title.should.equal(exportData.data.posts[0].title);
                post2.title.should.equal(exportData.data.posts[1].title);
                post3.title.should.equal(exportData.data.posts[2].title);

                // test tags
                tags.length.should.equal(
                    (exportData.data.tags.length + testUtils.DataGenerator.Content.tags.length),
                    'Wrong number of tags'
                );

                tags[0].name.should.equal(testUtils.DataGenerator.Content.tags[0].name);

                done();
            }).catch(done);
        });

        it('imports users with correct roles and status', function (done) {
            var fetchImported = Promise.join(
                knex('users').select(),
                knex('roles_users').select()
            );

            fetchImported.then(function (importedData) {
                var ownerUser,
                    newUser,
                    existingUser,
                    users,
                    rolesUsers;

                // General data checks
                should.exist(importedData);
                importedData.length.should.equal(2, 'Did not get data successfully');

                // Test the users and roles
                users = importedData[0];
                rolesUsers = importedData[1];

                // we imported 3 users, there were already 4 users, only one of the imported users is new
                users.length.should.equal(5, 'There should only be five users');

                // the owner user is first
                ownerUser = users[0];
                // the other two users should have the imported data, but they get inserted in different orders
                newUser = _.find(users, function (user) {
                    return user.name === exportData.data.users[1].name;
                });
                existingUser = _.find(users, function (user) {
                    return user.name === exportData.data.users[2].name;
                });

                ownerUser.email.should.equal(testUtils.DataGenerator.Content.users[0].email);
                ownerUser.password.should.equal(testUtils.DataGenerator.Content.users[0].password);
                ownerUser.status.should.equal('active');
                newUser.email.should.equal(exportData.data.users[1].email);
                existingUser.email.should.equal(exportData.data.users[2].email);

                // Newly created users should have a status of locked
                newUser.status.should.equal('locked');
                // The already existing user should still have a status of active
                existingUser.status.should.equal('active');

                // Newly created users should have created_at/_by and updated_at/_by set to when they were imported
                newUser.created_by.should.equal(ownerUser.id);
                newUser.created_at.should.not.equal(exportData.data.users[1].created_at);
                newUser.updated_by.should.equal(ownerUser.id);
                newUser.updated_at.should.not.equal(exportData.data.users[1].updated_at);

                rolesUsers.length.should.equal(5, 'There should be 5 role relations');

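                // The five role relations asserted above presumably correspond to one role each for the
                // four pre-existing users plus the one newly imported user.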
                _.each(rolesUsers, function (roleUser) {
                    if (roleUser.user_id === ownerUser.id) {
                        roleUser.role_id.should.equal(testUtils.DataGenerator.Content.roles[3].id, 'Original user should be an owner');
                    }

                    if (roleUser.user_id === newUser.id) {
                        roleUser.role_id.should.equal(testUtils.DataGenerator.Content.roles[0].id, 'New user should be an admin');
                    }

                    if (roleUser.user_id === existingUser.id) {
roleUser.role_id.should.equal(testUtils.DataGenerator.Content.roles[0].id, 'Existing user was an admin');
|
2014-12-11 18:50:10 +03:00
|
|
|
}
|
|
|
|
});
|
|
|
|
|
|
|
|
done();
|
|
|
|
}).catch(done);
|
|
|
|
});
|
|
|
|
|
|
|
|
it('imports posts & tags with correct authors, owners etc', function (done) {
|
|
|
|
var fetchImported = Promise.join(
|
|
|
|
knex('users').select(),
|
|
|
|
knex('posts').select(),
|
|
|
|
knex('tags').select()
|
|
|
|
);
|
|
|
|
|
|
|
|
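// Promise.join resolves with the query results in the same order the queries were passed in,
// so importedData[0] is users, [1] is posts and [2] is tags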
fetchImported.then(function (importedData) {
|
|
|
|
var users, ownerUser, newUser, existingUser,
|
|
|
|
posts, post1, post2, post3,
|
|
|
|
tags, tag1, tag2, tag3;
|
|
|
|
|
|
|
|
// General data checks
|
|
|
|
should.exist(importedData);
|
|
|
|
importedData.length.should.equal(3, 'Did not get data successfully');
|
|
|
|
|
|
|
|
// Split out the imported users, posts and tags
|
|
|
|
users = importedData[0];
|
|
|
|
posts = importedData[1];
|
2017-03-21 11:24:11 +03:00
|
|
|
tags = importedData[2];
|
2014-12-11 18:50:10 +03:00
|
|
|
|
|
|
|
// Grab the users
|
|
|
|
// the owner user is first
|
|
|
|
ownerUser = users[0];
|
|
|
|
// the other two users should have the imported data, but they get inserted in different orders
|
|
|
|
newUser = _.find(users, function (user) {
|
|
|
|
return user.name === exportData.data.users[1].name;
|
|
|
|
});
|
|
|
|
existingUser = _.find(users, function (user) {
|
|
|
|
return user.name === exportData.data.users[2].name;
|
|
|
|
});
|
|
|
|
post1 = _.find(posts, function (post) {
|
|
|
|
return post.slug === exportData.data.posts[0].slug;
|
|
|
|
});
|
|
|
|
post2 = _.find(posts, function (post) {
|
|
|
|
return post.slug === exportData.data.posts[1].slug;
|
|
|
|
});
|
|
|
|
post3 = _.find(posts, function (post) {
|
|
|
|
return post.slug === exportData.data.posts[2].slug;
|
|
|
|
});
|
|
|
|
tag1 = _.find(tags, function (tag) {
|
|
|
|
return tag.slug === exportData.data.tags[0].slug;
|
|
|
|
});
|
|
|
|
tag2 = _.find(tags, function (tag) {
|
|
|
|
return tag.slug === exportData.data.tags[1].slug;
|
|
|
|
});
|
|
|
|
tag3 = _.find(tags, function (tag) {
|
|
|
|
return tag.slug === exportData.data.tags[2].slug;
|
|
|
|
});
|
|
|
|
|
|
|
|
// Check the authors are correct
|
|
|
|
post1.author_id.should.equal(newUser.id);
|
|
|
|
post2.author_id.should.equal(existingUser.id);
|
|
|
|
post3.author_id.should.equal(ownerUser.id);
|
|
|
|
|
|
|
|
// Created by should be what was in the import file
|
|
|
|
post1.created_by.should.equal(ownerUser.id);
|
|
|
|
post2.created_by.should.equal(existingUser.id);
|
|
|
|
post3.created_by.should.equal(ownerUser.id);
|
|
|
|
|
|
|
|
// Updated by gets set to the current user
|
|
|
|
post1.updated_by.should.equal(ownerUser.id);
|
|
|
|
post2.updated_by.should.equal(ownerUser.id);
|
|
|
|
post3.updated_by.should.equal(ownerUser.id);
|
|
|
|
|
|
|
|
// Published by should be what was in the import file
|
|
|
|
post1.published_by.should.equal(newUser.id);
|
|
|
|
post2.published_by.should.equal(existingUser.id);
|
|
|
|
post3.published_by.should.equal(ownerUser.id);
|
|
|
|
|
|
|
|
// Created by should be what was in the import file
|
|
|
|
tag1.created_by.should.equal(ownerUser.id);
|
|
|
|
tag2.created_by.should.equal(newUser.id);
|
|
|
|
tag3.created_by.should.equal(existingUser.id);
|
|
|
|
|
|
|
|
// Updated by gets set to the current user
|
|
|
|
tag1.updated_by.should.equal(ownerUser.id);
|
|
|
|
tag2.updated_by.should.equal(ownerUser.id);
|
|
|
|
tag3.updated_by.should.equal(ownerUser.id);
|
|
|
|
|
|
|
|
done();
|
|
|
|
}).catch(done);
|
2014-08-09 23:16:54 +04:00
|
|
|
});
|
|
|
|
});
|
2015-05-12 22:40:01 +03:00
|
|
|
|
|
|
|
describe('imports multi user data onto existing data without duplicate owners', function () {
|
|
|
|
var exportData;
|
|
|
|
|
|
|
|
before(function doImport(done) {
|
|
|
|
// initialise the blog with some data
|
|
|
|
testUtils.initFixtures('users:roles', 'posts', 'settings').then(function () {
|
|
|
|
return testUtils.fixtures.loadExportFixture('export-003-mu-multipleOwner');
|
|
|
|
}).then(function (exported) {
|
|
|
|
exportData = exported;
|
|
|
|
return importer.doImport(exportData);
|
|
|
|
}).then(function () {
|
|
|
|
done();
|
|
|
|
}).catch(done);
|
|
|
|
});
|
|
|
|
after(testUtils.teardown);
|
|
|
|
|
|
|
|
it('imports users with correct roles and status', function (done) {
|
|
|
|
var fetchImported = Promise.join(
|
|
|
|
knex('users').select(),
|
|
|
|
knex('roles_users').select()
|
|
|
|
);
|
|
|
|
|
|
|
|
fetchImported.then(function (importedData) {
|
|
|
|
var ownerUser,
|
|
|
|
newUser,
|
|
|
|
existingUser,
|
|
|
|
users,
|
|
|
|
rolesUsers;
|
|
|
|
|
|
|
|
// General data checks
|
|
|
|
should.exist(importedData);
|
|
|
|
importedData.length.should.equal(2, 'Did not get data successfully');
|
|
|
|
|
|
|
|
// Test the users and roles
|
|
|
|
users = importedData[0];
|
|
|
|
rolesUsers = importedData[1];
|
|
|
|
|
|
|
|
// the owner user is first
|
|
|
|
ownerUser = users[0];
|
|
|
|
|
|
|
|
// the other two users should have the imported data, but they get inserted in different orders
|
|
|
|
newUser = _.find(users, function (user) {
|
|
|
|
return user.name === exportData.data.users[1].name;
|
|
|
|
});
|
|
|
|
existingUser = _.find(users, function (user) {
|
|
|
|
return user.name === exportData.data.users[2].name;
|
|
|
|
});
|
|
|
|
|
|
|
|
// we imported 3 users, there were already 4 users, only one of the imported users is new
|
|
|
|
users.length.should.equal(5, 'There should be 5 users: 4 existing plus 1 newly imported');
|
|
|
|
|
|
|
|
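// one role relation per user: the 4 existing users plus the 1 newly imported user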
rolesUsers.length.should.equal(5, 'There should be 5 role relations');
|
|
|
|
|
|
|
|
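// in the DataGenerator fixtures, roles[0] is the Administrator role and roles[3] is the Owner role
// (see the assertion messages below)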
_.each(rolesUsers, function (roleUser) {
|
|
|
|
if (roleUser.user_id === ownerUser.id) {
|
2016-11-17 12:09:11 +03:00
|
|
|
roleUser.role_id.should.equal(testUtils.DataGenerator.Content.roles[3].id, 'Original user should be an owner');
|
2015-05-12 22:40:01 +03:00
|
|
|
}
|
|
|
|
if (roleUser.user_id === newUser.id) {
|
2016-11-17 12:09:11 +03:00
|
|
|
roleUser.role_id.should.equal(testUtils.DataGenerator.Content.roles[0].id, 'New user should be downgraded from owner to admin');
|
2015-05-12 22:40:01 +03:00
|
|
|
}
|
|
|
|
if (roleUser.user_id === existingUser.id) {
|
2016-11-17 12:09:11 +03:00
|
|
|
roleUser.role_id.should.equal(testUtils.DataGenerator.Content.roles[0].id, 'Existing user was an admin');
|
2015-05-12 22:40:01 +03:00
|
|
|
}
|
|
|
|
});
|
|
|
|
|
|
|
|
done();
|
|
|
|
}).catch(done);
|
|
|
|
});
|
|
|
|
});
|
2014-08-09 23:16:54 +04:00
|
|
|
});
|