// Ghost/test/regression/models/model_posts.test.js

const errors = require('@tryghost/errors');
const should = require('should');
const sinon = require('sinon');
const testUtils = require('../../utils');
const moment = require('moment');
const _ = require('lodash');
const Promise = require('bluebird');
const {sequence} = require('@tryghost/promise');
const urlService = require('../../../core/frontend/services/url');
const ghostBookshelf = require('../../../core/server/models/base');
const models = require('../../../core/server/models');
const db = require('../../../core/server/data/db');
const settingsCache = require('../../../core/shared/settings-cache');
const events = require('../../../core/server/lib/common/events');
const configUtils = require('../../utils/configUtils');
const context = testUtils.context.owner;
const markdownToMobiledoc = testUtils.DataGenerator.markdownToMobiledoc;
/**
* IMPORTANT:
* - do not spy on the events module, because when we only spy, all listeners still receive the event
* - this can cause unexpected behaviour, as the listeners execute code
* - using rewire is not possible, because each model registers itself in the Bookshelf model registry
* - rewire would add a second registry, and a file that requires the models would try to register each model another time
*/
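// Instead of spying, the tests below stub `events.emit` and collect the emitted
// event names in the `eventsTriggered` map (see the beforeEach hooks inside the
// `edit` and `add` suites), so no real listeners run while the tests execute.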
describe('Post Model', function () {
let eventsTriggered = {};
before(testUtils.teardownDb);
before(testUtils.stopGhost);
after(testUtils.teardownDb);
before(testUtils.setup('users:roles'));
afterEach(function () {
sinon.restore();
});
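// With dynamic routing, resource URLs come from the url service rather than being
// computed from the permalink setting at runtime, so the lookup is stubbed here to
// return a fixed URL for the first fixture post.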
beforeEach(function () {
sinon.stub(urlService, 'getUrlByResourceId').withArgs(testUtils.DataGenerator.Content.posts[0].id).returns('/html-ipsum/');
});
describe('Single author posts', function () {
afterEach(function () {
configUtils.restore();
});
describe('fetchOne/fetchAll/fetchPage', function () {
before(testUtils.fixtures.insertPostsAndTags);
after(function () {
return testUtils.truncate('posts_tags')
.then(function () {
return testUtils.truncate('tags');
})
.then(function () {
return testUtils.truncate('posts');
})
.then(function () {
return testUtils.truncate('posts_meta');
});
});
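// Legacy `free`/`paid` values for `email_recipient_filter` are transformed by the
// Post model into their NQL equivalents (`status:free` / `status:-free`) on read
// and write, to stay backwards compatible with the older API values.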
describe('findOne', function () {
it('transforms legacy email_recipient_filter values on read', function (done) {
const postId = testUtils.DataGenerator.Content.posts[0].id;
db.knex('posts').where({id: postId}).update({
email_recipient_filter: 'paid'
}).then(() => {
return db.knex('posts').where({id: postId});
}).then((knexResult) => {
const [knexPost] = knexResult;
knexPost.email_recipient_filter.should.equal('paid');
return models.Post.findOne({id: postId});
}).then((result) => {
should.exist(result);
const post = result.toJSON();
post.email_recipient_filter.should.equal('status:-free');
done();
}).catch(done);
});
});
describe('findPage', function () {
describe('with more posts/tags', function () {
beforeEach(function () {
return testUtils.truncate('posts_tags')
.then(function () {
return testUtils.truncate('tags');
})
.then(function () {
return testUtils.truncate('posts_meta');
})
.then(function () {
return testUtils.truncate('posts');
});
});
beforeEach(function () {
return testUtils.fixtures.insertPostsAndTags()
.then(function () {
return testUtils.fixtures.insertExtraPosts();
})
.then(function () {
return testUtils.fixtures.insertExtraPostsTags();
});
});
it('can findPage, with various options', function (done) {
models.Post.findPage({page: 2})
.then(function (paginationResult) {
paginationResult.meta.pagination.page.should.equal(2);
paginationResult.meta.pagination.limit.should.equal(15);
paginationResult.meta.pagination.pages.should.equal(4);
paginationResult.data.length.should.equal(15);
return models.Post.findPage({page: 5});
}).then(function (paginationResult) {
paginationResult.meta.pagination.page.should.equal(5);
paginationResult.meta.pagination.limit.should.equal(15);
paginationResult.meta.pagination.pages.should.equal(4);
paginationResult.data.length.should.equal(0);
return models.Post.findPage({limit: 30});
}).then(function (paginationResult) {
paginationResult.meta.pagination.page.should.equal(1);
paginationResult.meta.pagination.limit.should.equal(30);
paginationResult.meta.pagination.pages.should.equal(2);
paginationResult.data.length.should.equal(30);
// Test featured pages
return models.Post.findPage({limit: 10, filter: 'featured:true'});
}).then(function (paginationResult) {
paginationResult.meta.pagination.page.should.equal(1);
paginationResult.meta.pagination.limit.should.equal(10);
paginationResult.meta.pagination.pages.should.equal(1);
paginationResult.data.length.should.equal(2);
// Test both boolean formats for featured pages
return models.Post.findPage({limit: 10, filter: 'featured:1'});
}).then(function (paginationResult) {
paginationResult.meta.pagination.page.should.equal(1);
paginationResult.meta.pagination.limit.should.equal(10);
paginationResult.meta.pagination.pages.should.equal(1);
paginationResult.data.length.should.equal(2);
return models.Post.findPage({limit: 10, page: 2, status: 'all'});
}).then(function (paginationResult) {
paginationResult.meta.pagination.pages.should.equal(11);
return models.Post.findPage({limit: 'all', status: 'all'});
}).then(function (paginationResult) {
paginationResult.meta.pagination.page.should.equal(1);
paginationResult.meta.pagination.limit.should.equal('all');
paginationResult.meta.pagination.pages.should.equal(1);
paginationResult.data.length.should.equal(108);
done();
}).catch(done);
});
it('can findPage for tag, with various options', function (done) {
// Test tag filter
models.Post.findPage({page: 1, filter: 'tags:bacon'})
.then(function (paginationResult) {
paginationResult.meta.pagination.page.should.equal(1);
paginationResult.meta.pagination.limit.should.equal(15);
paginationResult.meta.pagination.pages.should.equal(1);
paginationResult.data.length.should.equal(2);
return models.Post.findPage({page: 1, filter: 'tags:kitchen-sink'});
}).then(function (paginationResult) {
paginationResult.meta.pagination.page.should.equal(1);
paginationResult.meta.pagination.limit.should.equal(15);
paginationResult.meta.pagination.pages.should.equal(1);
paginationResult.data.length.should.equal(2);
return models.Post.findPage({page: 1, filter: 'tags:injection'});
}).then(function (paginationResult) {
paginationResult.meta.pagination.page.should.equal(1);
paginationResult.meta.pagination.limit.should.equal(15);
paginationResult.meta.pagination.pages.should.equal(2);
paginationResult.data.length.should.equal(15);
return models.Post.findPage({page: 2, filter: 'tags:injection'});
}).then(function (paginationResult) {
paginationResult.meta.pagination.page.should.equal(2);
paginationResult.meta.pagination.limit.should.equal(15);
paginationResult.meta.pagination.pages.should.equal(2);
paginationResult.data.length.should.equal(10);
done();
}).catch(done);
});
});
});
});
describe('edit', function () {
beforeEach(testUtils.fixtures.insertPostsAndTags);
afterEach(function () {
return testUtils.truncate('posts_tags')
.then(function () {
return testUtils.truncate('tags');
})
.then(function () {
return testUtils.truncate('posts');
})
.then(function () {
return testUtils.truncate('posts_meta');
});
});
beforeEach(function () {
eventsTriggered = {};
sinon.stub(events, 'emit').callsFake(function (eventName, eventObj) {
if (!eventsTriggered[eventName]) {
eventsTriggered[eventName] = [];
}
eventsTriggered[eventName].push(eventObj);
});
});
it('[failure] multiple edits in one transaction', function () {
const options = _.cloneDeep(context);
const data = {
status: 'published'
};
return models.Base.transaction(function (txn) {
options.transacting = txn;
return models.Post.edit(data, _.merge({id: testUtils.DataGenerator.Content.posts[3].id}, options))
.then(function () {
return models.Post.edit(data, _.merge({id: testUtils.DataGenerator.Content.posts[5].id}, options));
})
.then(function () {
// force rollback
throw new Error();
});
}).catch(function () {
// txn was rolled back
Object.keys(eventsTriggered).length.should.eql(0);
});
});
it('multiple edits in one transaction', function () {
const options = _.cloneDeep(context);
const data = {
status: 'published'
};
return models.Base.transaction(function (txn) {
options.transacting = txn;
return models.Post.edit(data, _.merge({id: testUtils.DataGenerator.Content.posts[3].id}, options))
.then(function () {
return models.Post.edit(data, _.merge({id: testUtils.DataGenerator.Content.posts[5].id}, options));
});
}).then(function () {
// txn was successful
Object.keys(eventsTriggered).length.should.eql(4);
});
});
it('can change title', function (done) {
const postId = testUtils.DataGenerator.Content.posts[0].id;
models.Post.findOne({id: postId}).then(function (results) {
let post;
should.exist(results);
post = results.toJSON();
post.id.should.equal(postId);
post.title.should.not.equal('new title');
return models.Post.edit({title: 'new title'}, _.extend({}, context, {id: postId}));
}).then(function (edited) {
should.exist(edited);
edited.attributes.title.should.equal('new title');
Object.keys(eventsTriggered).length.should.eql(2);
should.exist(eventsTriggered['post.published.edited']);
should.exist(eventsTriggered['post.edited']);
done();
}).catch(done);
});
it('[failure] custom excerpt soft limit reached', function (done) {
const postId = testUtils.DataGenerator.Content.posts[0].id;
models.Post.findOne({id: postId}).then(function (results) {
let post;
should.exist(results);
post = results.toJSON();
post.id.should.equal(postId);
return models.Post.edit({
custom_excerpt: new Array(302).join('a')
}, _.extend({}, context, {id: postId}));
}).then(function () {
done(new Error('expected validation error'));
}).catch(function (err) {
(err[0] instanceof errors.ValidationError).should.eql(true);
done();
});
});
it('can publish draft post', function (done) {
const postId = testUtils.DataGenerator.Content.posts[3].id;
models.Post.findOne({id: postId, status: 'draft'}).then(function (results) {
let post;
should.exist(results);
post = results.toJSON();
post.id.should.equal(postId);
post.status.should.equal('draft');
return models.Post.edit({status: 'published'}, _.extend({}, context, {id: postId}));
}).then(function (edited) {
should.exist(edited);
edited.attributes.status.should.equal('published');
Object.keys(eventsTriggered).length.should.eql(4);
should.exist(eventsTriggered['post.published']);
should.exist(eventsTriggered['post.edited']);
should.exist(eventsTriggered['tag.attached']);
should.exist(eventsTriggered['user.attached']);
done();
}).catch(done);
});
it('can unpublish published post', function (done) {
const postId = testUtils.DataGenerator.Content.posts[0].id;
models.Post.findOne({id: postId}).then(function (results) {
let post;
should.exist(results);
post = results.toJSON();
post.id.should.equal(postId);
post.status.should.equal('published');
return models.Post.edit({status: 'draft'}, _.extend({}, context, {id: postId}));
}).then(function (edited) {
should.exist(edited);
edited.attributes.status.should.equal('draft');
Object.keys(eventsTriggered).length.should.eql(4);
should.exist(eventsTriggered['post.unpublished']);
should.exist(eventsTriggered['post.edited']);
done();
}).catch(done);
});
it('draft -> scheduled without published_at update', function (done) {
let post;
models.Post.findOne({status: 'draft'}).then(function (results) {
should.exist(results);
post = results.toJSON();
post.status.should.equal('draft');
results.set('published_at', null);
return results.save();
}).then(function () {
return models.Post.edit({
status: 'scheduled'
}, _.extend({}, context, {id: post.id}));
}).then(function () {
done(new Error('expected error'));
}).catch(function (err) {
should.exist(err);
(err instanceof errors.ValidationError).should.eql(true);
done();
});
});
it('draft -> scheduled: expect update of published_at', function (done) {
const newPublishedAt = moment().add(1, 'day').toDate();
models.Post.findOne({status: 'draft'}).then(function (results) {
let post;
should.exist(results);
post = results.toJSON();
post.status.should.equal('draft');
return models.Post.edit({
status: 'scheduled',
published_at: newPublishedAt
}, _.extend({}, context, {id: post.id}));
}).then(function (edited) {
should.exist(edited);
edited.attributes.status.should.equal('scheduled');
// mysql does not store ms
moment(edited.attributes.published_at).startOf('seconds').diff(moment(newPublishedAt).startOf('seconds')).should.eql(0);
Object.keys(eventsTriggered).length.should.eql(2);
should.exist(eventsTriggered['post.scheduled']);
should.exist(eventsTriggered['post.edited']);
done();
}).catch(done);
});
it('scheduled -> draft: expect unschedule', function (done) {
models.Post.findOne({status: 'scheduled'}).then(function (results) {
let post;
should.exist(results);
post = results.toJSON();
post.status.should.equal('scheduled');
return models.Post.edit({
status: 'draft'
}, _.extend({}, context, {id: post.id}));
}).then(function (edited) {
should.exist(edited);
edited.attributes.status.should.equal('draft');
Object.keys(eventsTriggered).length.should.eql(2);
should.exist(eventsTriggered['post.unscheduled']);
should.exist(eventsTriggered['post.edited']);
done();
}).catch(done);
});
it('scheduled -> scheduled with updated published_at', function (done) {
models.Post.findOne({status: 'scheduled'}).then(function (results) {
let post;
should.exist(results);
post = results.toJSON();
post.status.should.equal('scheduled');
return models.Post.edit({
status: 'scheduled',
published_at: moment().add(20, 'days').toDate()
}, _.extend({}, context, {id: post.id}));
}).then(function (edited) {
should.exist(edited);
edited.attributes.status.should.equal('scheduled');
Object.keys(eventsTriggered).length.should.eql(2);
should.exist(eventsTriggered['post.rescheduled']);
should.exist(eventsTriggered['post.edited']);
done();
}).catch(done);
});
it('scheduled -> scheduled with unchanged published_at', function (done) {
models.Post.findOne({status: 'scheduled'}).then(function (results) {
let post;
should.exist(results);
post = results.toJSON();
post.status.should.equal('scheduled');
return models.Post.edit({
status: 'scheduled'
}, _.extend({}, context, {id: post.id}));
}).then(function (edited) {
should.exist(edited);
edited.attributes.status.should.equal('scheduled');
// nothing has changed
Object.keys(eventsTriggered).length.should.eql(0);
done();
}).catch(done);
});
it('scheduled -> scheduled with unchanged published_at (within the 2-minute window)', function (done) {
let post;
models.Post.findOne({status: 'scheduled'}).then(function (results) {
should.exist(results);
post = results.toJSON();
post.status.should.equal('scheduled');
results.set('published_at', moment().add(2, 'minutes').add(2, 'seconds').toDate());
return results.save();
}).then(function () {
Object.keys(eventsTriggered).length.should.eql(2);
should.exist(eventsTriggered['post.edited']);
should.exist(eventsTriggered['post.rescheduled']);
eventsTriggered = {};
return Promise.delay(1000 * 3);
}).then(function () {
return models.Post.edit({
status: 'scheduled'
}, _.extend({}, context, {id: post.id}));
}).then(function (edited) {
should.exist(edited);
edited.attributes.status.should.equal('scheduled');
Object.keys(eventsTriggered).length.should.eql(1);
should.exist(eventsTriggered['post.edited']);
done();
}).catch(done);
});
it('published -> scheduled and expect update of published_at', function (done) {
const postId = testUtils.DataGenerator.Content.posts[0].id;
models.Post.findOne({id: postId}).then(function (results) {
let post;
should.exist(results);
post = results.toJSON();
post.id.should.equal(postId);
post.status.should.equal('published');
return models.Post.edit({
status: 'scheduled',
published_at: moment().add(1, 'day').toDate()
}, _.extend({}, context, {id: postId}));
}).then(function () {
done(new Error('changing status from published to scheduled is not allowed right now!'));
}).catch(function (err) {
should.exist(err);
(err instanceof errors.ValidationError).should.eql(true);
done();
});
});
it('can convert draft post to page and back', function (done) {
const postId = testUtils.DataGenerator.Content.posts[3].id;
models.Post.findOne({id: postId, status: 'draft'}).then(function (results) {
let post;
should.exist(results);
post = results.toJSON();
post.id.should.equal(postId);
post.status.should.equal('draft');
return models.Post.edit({type: 'page'}, _.extend({}, context, {id: postId}));
}).then(function (edited) {
should.exist(edited);
edited.attributes.status.should.equal('draft');
edited.attributes.type.should.equal('page');
Object.keys(eventsTriggered).length.should.eql(2);
should.exist(eventsTriggered['post.deleted']);
should.exist(eventsTriggered['page.added']);
return models.Post.edit({type: 'post'}, _.extend({}, context, {id: postId}));
}).then(function (edited) {
should.exist(edited);
edited.attributes.status.should.equal('draft');
edited.attributes.type.should.equal('post');
Object.keys(eventsTriggered).length.should.eql(4);
should.exist(eventsTriggered['post.deleted']);
should.exist(eventsTriggered['page.added']);
should.exist(eventsTriggered['page.deleted']);
should.exist(eventsTriggered['post.added']);
done();
}).catch(done);
});
it('can convert draft to schedule AND post to page and back', function (done) {
models.Post.findOne({status: 'draft'}).then(function (results) {
let post;
should.exist(results);
post = results.toJSON();
post.status.should.equal('draft');
return models.Post.edit({
type: 'page',
status: 'scheduled',
published_at: moment().add(10, 'days')
}, _.extend({}, context, {id: post.id}));
}).then(function (edited) {
should.exist(edited);
edited.attributes.status.should.equal('scheduled');
edited.attributes.type.should.equal('page');
Object.keys(eventsTriggered).length.should.eql(3);
should.exist(eventsTriggered['post.deleted']);
should.exist(eventsTriggered['page.added']);
should.exist(eventsTriggered['page.scheduled']);
return models.Post.edit({type: 'post'}, _.extend({}, context, {id: edited.id}));
}).then(function (edited) {
should.exist(edited);
edited.attributes.status.should.equal('scheduled');
edited.attributes.type.should.equal('post');
Object.keys(eventsTriggered).length.should.eql(7);
should.exist(eventsTriggered['page.unscheduled']);
should.exist(eventsTriggered['page.deleted']);
should.exist(eventsTriggered['post.added']);
should.exist(eventsTriggered['post.scheduled']);
done();
}).catch(done);
});
it('can convert published post to page and back', function (done) {
const postId = testUtils.DataGenerator.Content.posts[0].id;
models.Post.findOne({id: postId}).then(function (results) {
let post;
should.exist(results);
post = results.toJSON();
post.id.should.equal(postId);
post.status.should.equal('published');
return models.Post.edit({type: 'page'}, _.extend({}, context, {id: postId}));
}).then(function (edited) {
should.exist(edited);
edited.attributes.status.should.equal('published');
edited.attributes.type.should.equal('page');
Object.keys(eventsTriggered).length.should.eql(4);
should.exist(eventsTriggered['post.unpublished']);
should.exist(eventsTriggered['post.deleted']);
should.exist(eventsTriggered['page.added']);
should.exist(eventsTriggered['page.published']);
return models.Post.edit({type: 'post'}, _.extend({}, context, {id: postId}));
}).then(function (edited) {
should.exist(edited);
edited.attributes.status.should.equal('published');
edited.attributes.type.should.equal('post');
Object.keys(eventsTriggered).length.should.eql(8);
should.exist(eventsTriggered['page.unpublished']);
should.exist(eventsTriggered['page.deleted']);
should.exist(eventsTriggered['post.added']);
should.exist(eventsTriggered['post.published']);
done();
}).catch(done);
});
it('can change type and status at the same time', function (done) {
const postId = testUtils.DataGenerator.Content.posts[3].id;
models.Post.findOne({id: postId, status: 'draft'}).then(function (results) {
let post;
should.exist(results);
post = results.toJSON();
post.id.should.equal(postId);
post.status.should.equal('draft');
return models.Post.edit({type: 'page', status: 'published'}, _.extend({}, context, {id: postId}));
}).then(function (edited) {
should.exist(edited);
edited.attributes.status.should.equal('published');
edited.attributes.type.should.equal('page');
Object.keys(eventsTriggered).length.should.eql(5);
should.exist(eventsTriggered['post.deleted']);
should.exist(eventsTriggered['page.added']);
should.exist(eventsTriggered['page.published']);
should.exist(eventsTriggered['tag.attached']);
should.exist(eventsTriggered['user.attached']);
return models.Post.edit({type: 'post', status: 'draft'}, _.extend({}, context, {id: postId}));
}).then(function (edited) {
should.exist(edited);
edited.attributes.status.should.equal('draft');
edited.attributes.type.should.equal('post');
Object.keys(eventsTriggered).length.should.eql(8);
should.exist(eventsTriggered['page.unpublished']);
should.exist(eventsTriggered['page.deleted']);
should.exist(eventsTriggered['post.added']);
done();
}).catch(done);
});
it('cannot override the published_by setting', function (done) {
const postId = testUtils.DataGenerator.Content.posts[3].id;
models.Post.findOne({id: postId, status: 'draft'}).then(function (results) {
let post;
should.exist(results);
post = results.toJSON();
post.id.should.equal(postId);
post.status.should.equal('draft');
// Test changing status and published_by at the same time
return models.Post.edit({
status: 'published',
published_by: 4
}, _.extend({}, context, {id: postId}));
}).then(function (edited) {
should.exist(edited);
edited.attributes.status.should.equal('published');
edited.attributes.published_by.should.equal(context.context.user);
// Test changing published_by on its own
return models.Post.edit({published_by: 4}, _.extend({}, context, {id: postId}));
}).then(function (edited) {
should.exist(edited);
edited.attributes.status.should.equal('published');
edited.attributes.published_by.should.equal(context.context.user);
done();
}).catch(done);
});
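// The same legacy `free`/`paid` -> NQL transformation applies when a post is saved.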
it('transforms legacy email_recipient_filter values on save', function (done) {
const postId = testUtils.DataGenerator.Content.posts[3].id;
models.Post.findOne({id: postId}).then(() => {
return models.Post.edit({
email_recipient_filter: 'free'
}, _.extend({}, context, {id: postId}));
}).then((edited) => {
edited.attributes.email_recipient_filter.should.equal('status:free');
return db.knex('posts').where({id: edited.id});
}).then((knexResult) => {
const [knexPost] = knexResult;
knexPost.email_recipient_filter.should.equal('status:free');
done();
}).catch(done);
});
it('transforms special-case visibility values on save', function (done) {
// status:-free === paid
// status:-free,status:free (+variations) === members
const postId = testUtils.DataGenerator.Content.posts[3].id;
models.Post.findOne({id: postId}).then(() => {
return models.Post.edit({
visibility: 'status:-free'
}, _.extend({}, context, {id: postId}));
}).then((edited) => {
edited.attributes.visibility.should.equal('paid');
return db.knex('posts').where({id: edited.id});
}).then((knexResult) => {
const [knexPost] = knexResult;
knexPost.visibility.should.equal('paid');
}).then(() => {
return models.Post.edit({
visibility: 'status:-free,status:free'
}, _.extend({}, context, {id: postId}));
}).then((edited) => {
edited.attributes.visibility.should.equal('members');
return models.Post.edit({
visibility: 'status:free,status:-free'
}, _.extend({}, context, {id: postId}));
}).then((edited) => {
edited.attributes.visibility.should.equal('members');
return models.Post.edit({
visibility: 'status:free,status:-free,label:vip'
}, _.extend({}, context, {id: postId}));
}).then((edited) => {
edited.attributes.visibility.should.equal('members');
done();
}).catch(done);
});
});
describe('add', function () {
before(testUtils.fixtures.insertPostsAndTags);
after(function () {
return testUtils.truncate('posts_tags')
.then(function () {
return testUtils.truncate('tags');
})
.then(function () {
return testUtils.truncate('posts');
})
.then(function () {
return testUtils.truncate('posts_meta');
});
});
beforeEach(function () {
eventsTriggered = {};
sinon.stub(events, 'emit').callsFake(function (eventName, eventObj) {
if (!eventsTriggered[eventName]) {
eventsTriggered[eventName] = [];
}
eventsTriggered[eventName].push(eventObj);
});
});
it('can add, defaults are all correct', function (done) {
let createdPostUpdatedDate;
const newPost = testUtils.DataGenerator.forModel.posts[2];
const newPostDB = testUtils.DataGenerator.Content.posts[2];
models.Post.add(newPost, _.merge({withRelated: ['author']}, context)).then(function (createdPost) {
return models.Post.findOne({id: createdPost.id, status: 'all'});
}).then(function (createdPost) {
should.exist(createdPost);
createdPost.has('uuid').should.equal(true);
createdPost.get('status').should.equal('draft');
createdPost.get('title').should.equal(newPost.title, 'title is correct');
createdPost.get('mobiledoc').should.equal(newPost.mobiledoc, 'mobiledoc is correct');
createdPost.has('html').should.equal(true);
createdPost.get('html').should.equal(newPostDB.html);
createdPost.has('plaintext').should.equal(true);
createdPost.get('plaintext').should.match(/^testing/);
createdPost.get('slug').should.equal(newPostDB.slug + '-2');
(!!createdPost.get('featured')).should.equal(false);
(!!createdPost.get('page')).should.equal(false);
should.equal(createdPost.get('locale'), null);
should.equal(createdPost.get('visibility'), 'public');
// testing for nulls
(createdPost.get('feature_image') === null).should.equal(true);
createdPost.get('created_at').should.be.above(new Date(0).getTime());
createdPost.get('created_by').should.equal(testUtils.DataGenerator.Content.users[0].id);
createdPost.get('author_id').should.equal(testUtils.DataGenerator.Content.users[0].id);
createdPost.has('author').should.equal(false);
createdPost.get('created_by').should.equal(createdPost.get('author_id'));
createdPost.get('updated_at').should.be.above(new Date(0).getTime());
createdPost.get('updated_by').should.equal(testUtils.DataGenerator.Content.users[0].id);
should.equal(createdPost.get('published_at'), null);
should.equal(createdPost.get('published_by'), null);
createdPostUpdatedDate = createdPost.get('updated_at');
Object.keys(eventsTriggered).length.should.eql(2);
should.exist(eventsTriggered['post.added']);
should.exist(eventsTriggered['user.attached']);
// Set the status to published to check that `published_at` is set.
return createdPost.save({status: 'published'}, context);
}).then(function (publishedPost) {
publishedPost.get('published_at').should.be.instanceOf(Date);
publishedPost.get('published_by').should.equal(testUtils.DataGenerator.Content.users[0].id);
publishedPost.get('updated_at').should.be.instanceOf(Date);
publishedPost.get('updated_by').should.equal(testUtils.DataGenerator.Content.users[0].id);
publishedPost.get('updated_at').should.not.equal(createdPostUpdatedDate);
Object.keys(eventsTriggered).length.should.eql(4);
should.exist(eventsTriggered['post.published']);
should.exist(eventsTriggered['post.edited']);
done();
}).catch(done);
});
it('can add, default visibility is taken from settings cache', function (done) {
const originalSettingsCacheGetFn = settingsCache.get;
sinon.stub(settingsCache, 'get')
.callsFake(function (key, options) {
if (key === 'labs') {
return {
members: true
};
} else if (key === 'default_content_visibility') {
return 'paid';
}
return originalSettingsCacheGetFn(key, options);
});
let createdPostUpdatedDate;
const newPost = testUtils.DataGenerator.forModel.posts[2];
const newPostDB = testUtils.DataGenerator.Content.posts[2];
models.Post.add(newPost, _.merge({withRelated: ['author']}, context)).then(function (createdPost) {
return models.Post.findOne({id: createdPost.id, status: 'all'});
}).then(function (createdPost) {
should.exist(createdPost);
createdPost.has('uuid').should.equal(true);
createdPost.get('status').should.equal('draft');
createdPost.get('title').should.equal(newPost.title, 'title is correct');
createdPost.get('mobiledoc').should.equal(newPost.mobiledoc, 'mobiledoc is correct');
createdPost.has('html').should.equal(true);
createdPost.get('html').should.equal(newPostDB.html);
createdPost.has('plaintext').should.equal(true);
createdPost.get('plaintext').should.match(/^testing/);
// createdPost.get('slug').should.equal(newPostDB.slug + '-3');
(!!createdPost.get('featured')).should.equal(false);
(!!createdPost.get('page')).should.equal(false);
should.equal(createdPost.get('locale'), null);
should.equal(createdPost.get('visibility'), 'paid');
// testing for nulls
(createdPost.get('feature_image') === null).should.equal(true);
createdPost.get('created_at').should.be.above(new Date(0).getTime());
createdPost.get('created_by').should.equal(testUtils.DataGenerator.Content.users[0].id);
createdPost.get('author_id').should.equal(testUtils.DataGenerator.Content.users[0].id);
createdPost.has('author').should.equal(false);
createdPost.get('created_by').should.equal(createdPost.get('author_id'));
createdPost.get('updated_at').should.be.above(new Date(0).getTime());
createdPost.get('updated_by').should.equal(testUtils.DataGenerator.Content.users[0].id);
should.equal(createdPost.get('published_at'), null);
should.equal(createdPost.get('published_by'), null);
createdPostUpdatedDate = createdPost.get('updated_at');
Object.keys(eventsTriggered).length.should.eql(2);
should.exist(eventsTriggered['post.added']);
should.exist(eventsTriggered['user.attached']);
// Set the status to published to check that `published_at` is set.
return createdPost.save({status: 'published'}, context);
}).then(function (publishedPost) {
publishedPost.get('published_at').should.be.instanceOf(Date);
publishedPost.get('published_by').should.equal(testUtils.DataGenerator.Content.users[0].id);
publishedPost.get('updated_at').should.be.instanceOf(Date);
publishedPost.get('updated_by').should.equal(testUtils.DataGenerator.Content.users[0].id);
publishedPost.get('updated_at').should.not.equal(createdPostUpdatedDate);
Object.keys(eventsTriggered).length.should.eql(4);
should.exist(eventsTriggered['post.published']);
should.exist(eventsTriggered['post.edited']);
done();
}).catch(done);
});
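// A published_at supplied by the caller is preserved as-is, even when it lies in the past.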
it('can add, with previous published_at date', function (done) {
const previousPublishedAtDate = new Date(2013, 8, 21, 12);
models.Post.add({
status: 'published',
published_at: previousPublishedAtDate,
title: 'published_at test',
mobiledoc: markdownToMobiledoc('This is some content')
}, context).then(function (newPost) {
should.exist(newPost);
new Date(newPost.get('published_at')).getTime().should.equal(previousPublishedAtDate.getTime());
Object.keys(eventsTriggered).length.should.eql(3);
should.exist(eventsTriggered['post.added']);
should.exist(eventsTriggered['post.published']);
should.exist(eventsTriggered['user.attached']);
done();
}).catch(done);
});
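// Draft posts get no automatic published_at; only post.added and user.attached should fire.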
it('add draft post without published_at -> we expect no auto insert of published_at', function (done) {
models.Post.add({
status: 'draft',
title: 'draft 1',
mobiledoc: markdownToMobiledoc('This is some content')
}, context).then(function (newPost) {
should.exist(newPost);
should.not.exist(newPost.get('published_at'));
Object.keys(eventsTriggered).length.should.eql(2);
should.exist(eventsTriggered['post.added']);
should.exist(eventsTriggered['user.attached']);
done();
}).catch(done);
});
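// Authors can be attached on add; the legacy single author field reflects the first entry in authors.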
it('add multiple authors', function (done) {
models.Post.add({
status: 'draft',
title: 'draft 1',
mobiledoc: markdownToMobiledoc('This is some content'),
authors: [{
id: testUtils.DataGenerator.forKnex.users[0].id,
name: testUtils.DataGenerator.forKnex.users[0].name
}]
}, _.merge({withRelated: ['authors']}, context)).then(function (newPost) {
should.exist(newPost);
newPost.toJSON().author.should.eql(testUtils.DataGenerator.forKnex.users[0].id);
newPost.toJSON().authors.length.should.eql(1);
newPost.toJSON().authors[0].id.should.eql(testUtils.DataGenerator.forKnex.users[0].id);
done();
}).catch(done);
});
it('add draft post with published_at -> we expect published_at to exist', function (done) {
models.Post.add({
status: 'draft',
published_at: moment().toDate(),
title: 'draft 1',
mobiledoc: markdownToMobiledoc('This is some content')
}, context).then(function (newPost) {
should.exist(newPost);
should.exist(newPost.get('published_at'));
Object.keys(eventsTriggered).length.should.eql(2);
should.exist(eventsTriggered['post.added']);
should.exist(eventsTriggered['user.attached']);
done();
}).catch(done);
});
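// Scheduled posts must have a published_at that is sufficiently far in the future;
// the next few tests cover the missing, past, and too-close cases before the success case.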
it('add scheduled post without published_at -> we expect an error', function (done) {
models.Post.add({
status: 'scheduled',
title: 'scheduled 1',
mobiledoc: markdownToMobiledoc('This is some content')
}, context).catch(function (err) {
should.exist(err);
(err instanceof errors.ValidationError).should.eql(true);
Object.keys(eventsTriggered).length.should.eql(0);
done();
});
});
it('add scheduled post with published_at not in future -> we expect an error', function (done) {
models.Post.add({
status: 'scheduled',
published_at: moment().subtract(1, 'minute'),
title: 'scheduled 1',
mobiledoc: markdownToMobiledoc('This is some content')
}, context).catch(function (err) {
should.exist(err);
(err instanceof errors.ValidationError).should.eql(true);
Object.keys(eventsTriggered).length.should.eql(0);
done();
});
});
it('add scheduled post with published_at 1 minute in future -> we expect an error', function (done) {
models.Post.add({
status: 'scheduled',
published_at: moment().add(1, 'minute'),
title: 'scheduled 1',
mobiledoc: markdownToMobiledoc('This is some content')
}, context).catch(function (err) {
(err instanceof errors.ValidationError).should.eql(true);
Object.keys(eventsTriggered).length.should.eql(0);
done();
});
});
it('add scheduled post with published_at 10 minutes in future -> we expect success', function (done) {
models.Post.add({
status: 'scheduled',
published_at: moment().add(10, 'minute'),
title: 'scheduled 1',
mobiledoc: markdownToMobiledoc('This is some content')
}, context).then(function (post) {
should.exist(post);
Object.keys(eventsTriggered).length.should.eql(3);
should.exist(eventsTriggered['post.added']);
should.exist(eventsTriggered['post.scheduled']);
should.exist(eventsTriggered['user.attached']);
done();
}).catch(done);
});
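// Slug collisions are resolved by appending an incrementing suffix (test-title, test-title-2, ...).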
it('can generate a non conflicting slug', function (done) {
// Create 12 posts with the same title
sequence(_.times(12, function (i) {
return function () {
return models.Post.add({
title: 'Test Title',
mobiledoc: markdownToMobiledoc('Test Content ' + (i + 1))
}, context);
};
})).then(function (createdPosts) {
// Should have created 12 posts
createdPosts.length.should.equal(12);
// Should have unique slugs and contents
_(createdPosts).each(function (post, i) {
const num = i + 1;
// First one has normal title
if (num === 1) {
post.get('slug').should.equal('test-title');
return;
}
post.get('slug').should.equal('test-title-' + num);
JSON.parse(post.get('mobiledoc')).cards[0][1].markdown.should.equal('Test Content ' + num);
Object.keys(eventsTriggered).length.should.eql(2);
should.exist(eventsTriggered['post.added']);
should.exist(eventsTriggered['user.attached']);
eventsTriggered['post.added'].length.should.eql(12);
});
done();
}).catch(done);
});
it('can generate slugs without duplicate hyphens', function (done) {
const newPost = {
title: 'apprehensive titles have too many spaces—and m-dashes — and also n-dashes ',
mobiledoc: markdownToMobiledoc('Test Content 1')
};
models.Post.add(newPost, context).then(function (createdPost) {
createdPost.get('slug').should.equal('apprehensive-titles-have-too-many-spaces-and-m-dashes-and-also-n-dashes');
Object.keys(eventsTriggered).length.should.eql(2);
should.exist(eventsTriggered['post.added']);
should.exist(eventsTriggered['user.attached']);
done();
}).catch(done);
});
it('can generate a safe slug when a protected keyword is used', function (done) {
const newPost = {
title: 'rss',
mobiledoc: markdownToMobiledoc('Test Content 1')
};
models.Post.add(newPost, context).then(function (createdPost) {
createdPost.get('slug').should.not.equal('rss');
Object.keys(eventsTriggered).length.should.eql(2);
should.exist(eventsTriggered['post.added']);
should.exist(eventsTriggered['user.attached']);
done();
}).catch(done);
});
it('can generate slugs without non-ascii characters', function (done) {
const newPost = {
title: 'भुते धडकी भरवणारा आहेत',
mobiledoc: markdownToMobiledoc('Test Content 1')
};
models.Post.add(newPost, context).then(function (createdPost) {
createdPost.get('slug').should.equal('bhute-dhddkii-bhrvnnaaraa-aahet');
done();
}).catch(done);
});
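// Saving a post with a slug that is already taken regenerates a unique slug rather than failing.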
it('detects duplicate slugs before saving', function (done) {
const firstPost = {
title: 'First post',
mobiledoc: markdownToMobiledoc('First content 1')
};
const secondPost = {
title: 'Second post',
mobiledoc: markdownToMobiledoc('Second content 1')
};
// Create the first post
models.Post.add(firstPost, context)
.then(function (createdFirstPost) {
// Store the slug for later
firstPost.slug = createdFirstPost.get('slug');
Object.keys(eventsTriggered).length.should.eql(2);
should.exist(eventsTriggered['post.added']);
should.exist(eventsTriggered['user.attached']);
// Create the second post
return models.Post.add(secondPost, context);
}).then(function (createdSecondPost) {
// Store the slug for comparison later
secondPost.slug = createdSecondPost.get('slug');
Object.keys(eventsTriggered).length.should.eql(2);
should.exist(eventsTriggered['post.added']);
should.exist(eventsTriggered['user.attached']);
// Update with a conflicting slug from the first post
return createdSecondPost.save({
slug: firstPost.slug
}, context);
}).then(function (updatedSecondPost) {
// Should have updated from original
updatedSecondPost.get('slug').should.not.equal(secondPost.slug);
// Should not have a conflicted slug from the first
updatedSecondPost.get('slug').should.not.equal(firstPost.slug);
Object.keys(eventsTriggered).length.should.eql(3);
should.exist(eventsTriggered['post.edited']);
return models.Post.findOne({
id: updatedSecondPost.id,
status: 'all'
});
}).then(function (foundPost) {
// Should have updated from original
foundPost.get('slug').should.not.equal(secondPost.slug);
// Should not have a conflicted slug from the first
foundPost.get('slug').should.not.equal(firstPost.slug);
done();
}).catch(done);
});
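// URLs are stored in the database in transform-ready form (__GHOST_URL__ placeholders)
// and read back from the model as absolute URLs.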
it('stores urls as transform-ready and reads as absolute', function (done) {
const post = {
title: 'Absolute->Transform-ready URL Transform Test',
mobiledoc: '{"version":"0.3.1","atoms":[],"cards":[["image",{"src":"http://127.0.0.1:2369/content/images/card.jpg"}]],"markups":[["a",["href","http://127.0.0.1:2369/test"]]],"sections":[[1,"p",[[0,[0],1,"Testing"]]],[10,0]]}',
custom_excerpt: 'Testing <a href="http://127.0.0.1:2369/internal">links</a> in custom excerpts',
codeinjection_head: '<script src="http://127.0.0.1:2369/assets/head.js"></script>',
codeinjection_foot: '<script src="http://127.0.0.1:2369/assets/foot.js"></script>',
feature_image: 'http://127.0.0.1:2369/content/images/feature.png',
canonical_url: 'http://127.0.0.1:2369/canonical',
posts_meta: {
og_image: 'http://127.0.0.1:2369/content/images/og.png',
twitter_image: 'http://127.0.0.1:2369/content/images/twitter.png'
}
};
models.Post.add(post, context).then((createdPost) => {
createdPost.get('mobiledoc').should.equal('{"version":"0.3.1","atoms":[],"cards":[["image",{"src":"http://127.0.0.1:2369/content/images/card.jpg"}]],"markups":[["a",["href","http://127.0.0.1:2369/test"]]],"sections":[[1,"p",[[0,[0],1,"Testing"]]],[10,0]]}');
createdPost.get('html').should.equal('<p><a href="http://127.0.0.1:2369/test">Testing</a></p><figure class="kg-card kg-image-card"><img src="http://127.0.0.1:2369/content/images/card.jpg" class="kg-image" alt loading="lazy"></figure>');
createdPost.get('plaintext').should.containEql('Testing [http://127.0.0.1:2369/test]');
createdPost.get('custom_excerpt').should.equal('Testing <a href="http://127.0.0.1:2369/internal">links</a> in custom excerpts');
createdPost.get('codeinjection_head').should.equal('<script src="http://127.0.0.1:2369/assets/head.js"></script>');
createdPost.get('codeinjection_foot').should.equal('<script src="http://127.0.0.1:2369/assets/foot.js"></script>');
createdPost.get('feature_image').should.equal('http://127.0.0.1:2369/content/images/feature.png');
createdPost.get('canonical_url').should.equal('http://127.0.0.1:2369/canonical');
const postMeta = createdPost.relations.posts_meta;
postMeta.get('og_image').should.equal('http://127.0.0.1:2369/content/images/og.png');
postMeta.get('twitter_image').should.equal('http://127.0.0.1:2369/content/images/twitter.png');
// ensure canonical_url is not transformed when protocol does not match
return createdPost.save({
canonical_url: 'https://127.0.0.1:2369/https-internal',
// sanity check for general absolute->transform-ready transform during edits
feature_image: 'http://127.0.0.1:2369/content/images/updated_feature.png'
});
}).then((updatedPost) => {
updatedPost.get('canonical_url').should.equal('https://127.0.0.1:2369/https-internal');
updatedPost.get('feature_image').should.equal('http://127.0.0.1:2369/content/images/updated_feature.png');
return updatedPost;
}).then((updatedPost) => {
return db.knex('posts').where({id: updatedPost.id});
}).then((knexResult) => {
const [knexPost] = knexResult;
knexPost.mobiledoc.should.equal('{"version":"0.3.1","atoms":[],"cards":[["image",{"src":"__GHOST_URL__/content/images/card.jpg"}]],"markups":[["a",["href","__GHOST_URL__/test"]]],"sections":[[1,"p",[[0,[0],1,"Testing"]]],[10,0]]}');
knexPost.html.should.equal('<p><a href="__GHOST_URL__/test">Testing</a></p><figure class="kg-card kg-image-card"><img src="__GHOST_URL__/content/images/card.jpg" class="kg-image" alt loading="lazy"></figure>');
knexPost.plaintext.should.containEql('Testing [__GHOST_URL__/test]');
knexPost.custom_excerpt.should.equal('Testing <a href="__GHOST_URL__/internal">links</a> in custom excerpts');
knexPost.codeinjection_head.should.equal('<script src="__GHOST_URL__/assets/head.js"></script>');
knexPost.codeinjection_foot.should.equal('<script src="__GHOST_URL__/assets/foot.js"></script>');
knexPost.feature_image.should.equal('__GHOST_URL__/content/images/updated_feature.png');
knexPost.canonical_url.should.equal('https://127.0.0.1:2369/https-internal');
done();
}).catch(done);
});
});
describe('destroy', function () {
beforeEach(testUtils.fixtures.insertPostsAndTags);
afterEach(function () {
return testUtils.truncate('posts_tags')
.then(function () {
return testUtils.truncate('tags');
})
.then(function () {
return testUtils.truncate('posts');
})
.then(function () {
return testUtils.truncate('posts_meta');
});
});
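// Stub the event emitter so each test can assert exactly which model events were triggered.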
beforeEach(function () {
eventsTriggered = {};
sinon.stub(events, 'emit').callsFake(function (eventName, eventObj) {
if (!eventsTriggered[eventName]) {
eventsTriggered[eventName] = [];
}
eventsTriggered[eventName].push(eventObj);
});
});
it('published post', function (done) {
// We're going to try deleting post id 1 which has tag id 1
const firstItemData = {id: testUtils.DataGenerator.Content.posts[0].id};
// Test that we have the post we expect, with exactly two tags
models.Post.findOne(firstItemData, {withRelated: ['tags']}).then(function (results) {
let post;
should.exist(results);
post = results.toJSON();
post.id.should.equal(firstItemData.id);
post.status.should.equal('published');
post.tags.should.have.length(2);
post.tags[0].id.should.equal(testUtils.DataGenerator.Content.tags[0].id);
// Destroy the post
return results.destroy();
}).then(function (response) {
const deleted = response.toJSON();
should.equal(deleted.author, undefined);
Object.keys(eventsTriggered).length.should.eql(5);
should.exist(eventsTriggered['post.unpublished']);
should.exist(eventsTriggered['post.deleted']);
should.exist(eventsTriggered['user.detached']);
should.exist(eventsTriggered['tag.detached']);
should.exist(eventsTriggered['post.tag.detached']);
// Double check we can't find the post again
return models.Post.findOne(firstItemData);
}).then(function (newResults) {
should.equal(newResults, null);
// Double check we can't find any related tags
return ghostBookshelf.knex.select().table('posts_tags').where('post_id', firstItemData.id);
}).then(function (postsTags) {
postsTags.should.be.empty();
done();
}).catch(done);
});
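// Destroying a draft skips the unpublished event; only deleted and detached events fire.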
it('draft post', function (done) {
// We're going to try deleting post 4 which also has tag 4
const firstItemData = {id: testUtils.DataGenerator.Content.posts[3].id, status: 'draft'};
// Test that we have the post we expect, with exactly one tag
models.Post.findOne(firstItemData, {withRelated: ['tags']}).then(function (results) {
let post;
should.exist(results);
post = results.toJSON();
post.id.should.equal(firstItemData.id);
post.tags.should.have.length(1);
post.tags[0].id.should.equal(testUtils.DataGenerator.Content.tags[3].id);
// Destroy the post
return results.destroy(firstItemData);
}).then(function (response) {
const deleted = response.toJSON();
should.equal(deleted.author, undefined);
Object.keys(eventsTriggered).length.should.eql(4);
should.exist(eventsTriggered['post.deleted']);
should.exist(eventsTriggered['tag.detached']);
should.exist(eventsTriggered['post.tag.detached']);
should.exist(eventsTriggered['user.detached']);
// Double check we can't find the post again
return models.Post.findOne(firstItemData);
}).then(function (newResults) {
should.equal(newResults, null);
// Double check we can't find any related tags
return ghostBookshelf.knex.select().table('posts_tags').where('post_id', firstItemData.id);
}).then(function (postsTags) {
postsTags.should.be.empty();
done();
}).catch(done);
});
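// Destroying a page emits page.* events rather than post.* events.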
it('published page', function (done) {
// We're going to try deleting page 6 which has tag 1
const firstItemData = {id: testUtils.DataGenerator.Content.posts[5].id};
// Test that we have the page we expect
models.Post.findOne(firstItemData, {withRelated: ['tags']}).then(function (results) {
let page;
should.exist(results);
page = results.toJSON();
page.id.should.equal(firstItemData.id);
page.status.should.equal('published');
page.type.should.equal('page');
// Destroy the page
return results.destroy(firstItemData);
}).then(function (response) {
const deleted = response.toJSON();
should.equal(deleted.author, undefined);
Object.keys(eventsTriggered).length.should.eql(3);
should.exist(eventsTriggered['page.unpublished']);
should.exist(eventsTriggered['page.deleted']);
should.exist(eventsTriggered['user.detached']);
// Double check we can't find the post again
return models.Post.findOne(firstItemData);
}).then(function (newResults) {
should.equal(newResults, null);
// Double check we can't find any related tags
return ghostBookshelf.knex.select().table('posts_tags').where('post_id', firstItemData.id);
}).then(function (postsTags) {
postsTags.should.be.empty();
done();
}).catch(done);
});
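// As with draft posts, destroying a draft page does not emit an unpublished event.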
it('draft page', function (done) {
// We're going to try deleting post 7 which has tag 4
const firstItemData = {id: testUtils.DataGenerator.Content.posts[6].id, status: 'draft'};
// Test that we have the post we expect
models.Post.findOne(firstItemData, {withRelated: ['tags']}).then(function (results) {
let page;
should.exist(results);
page = results.toJSON();
page.id.should.equal(firstItemData.id);
// Destroy the page
return results.destroy(firstItemData);
}).then(function (response) {
const deleted = response.toJSON();
should.equal(deleted.author, undefined);
Object.keys(eventsTriggered).length.should.eql(2);
should.exist(eventsTriggered['page.deleted']);
should.exist(eventsTriggered['user.detached']);
// Double check we can't find the post again
return models.Post.findOne(firstItemData);
}).then(function (newResults) {
should.equal(newResults, null);
// Double check we can't find any related tags
return ghostBookshelf.knex.select().table('posts_tags').where('post_id', firstItemData.id);
}).then(function (postsTags) {
postsTags.should.be.empty();
done();
}).catch(done);
});
});
describe('Collision Protection', function () {
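// Collision protection compares the client-supplied updated_at with the stored
// value; an edit that would overwrite newer changes is rejected with an
// UPDATE_COLLISION error (see assertions below).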
before(testUtils.fixtures.insertPostsAndTags);
after(function () {
return testUtils.truncate('posts_tags')
.then(function () {
return testUtils.truncate('tags');
})
.then(function () {
return testUtils.truncate('posts');
})
.then(function () {
return testUtils.truncate('posts_meta');
});
});
it('update post title, but updated_at is out of sync', function () {
const postToUpdate = {id: testUtils.DataGenerator.Content.posts[1].id};
return models.Post.edit({
title: 'New Post Title',
updated_at: moment().subtract(1, 'day').format()
}, _.extend({}, context, {id: postToUpdate.id}))
.then(function () {
throw new Error('expected no success');
})
.catch(function (err) {
err.code.should.eql('UPDATE_COLLISION');
});
});
it('update post tags and updated_at is out of sync', function () {
const postToUpdate = {id: testUtils.DataGenerator.Content.posts[1].id};
return models.Post.edit({
tags: [{name: 'new-tag-1'}],
updated_at: moment().subtract(1, 'day').format()
}, _.extend({}, context, {id: postToUpdate.id}))
.then(function () {
throw new Error('expected no success');
})
.catch(function (err) {
err.code.should.eql('UPDATE_COLLISION');
});
});
it('update post authors and updated_at is out of sync', function () {
const postToUpdate = {id: testUtils.DataGenerator.Content.posts[1].id};
return models.Post.edit({
authors: [testUtils.DataGenerator.Content.users[3]],
updated_at: moment().subtract(1, 'day').format()
}, _.extend({}, context, {id: postToUpdate.id}))
.then(function () {
throw new Error('expected no success');
})
.catch(function (err) {
err.code.should.eql('UPDATE_COLLISION');
});
});
it('update post tags and updated_at is NOT out of sync', function () {
const postToUpdate = {id: testUtils.DataGenerator.Content.posts[1].id};
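// No updated_at is sent with this edit, so it cannot be detected as stale and
// is expected to resolve without a collision error.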
return models.Post.edit({
tags: [{name: 'new-tag-1'}]
}, _.extend({}, context, {id: postToUpdate.id}));
});
it('update post with no changes, but updated_at is out of sync', function () {
const postToUpdate = {id: testUtils.DataGenerator.Content.posts[1].id};
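// A stale updated_at with no other changes should not count as a collision,
// so this edit is expected to resolve.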
return models.Post.edit({
updated_at: moment().subtract(1, 'day').format()
}, _.extend({}, context, {id: postToUpdate.id}));
});
it('update post with old post title, but updated_at is out of sync', function () {
const postToUpdate = {
id: testUtils.DataGenerator.Content.posts[1].id,
title: testUtils.DataGenerator.forModel.posts[1].title
};
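// Re-sending the unchanged title alongside a stale updated_at is not a real
// change, so no collision error is expected.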
return models.Post.edit({
title: postToUpdate.title,
updated_at: moment().subtract(1, 'day').format()
}, _.extend({}, context, {id: postToUpdate.id}));
});
});
});
describe('mobiledoc versioning', function () {
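// Revisions are read back via the MobiledocRevision model, filtered by post_id
// and ordered newest-first (see the index-based assertions below).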
it('can create revisions', function () {
const newPost = {
mobiledoc: markdownToMobiledoc('a')
};
return models.Post.add(newPost, context)
.then((createdPost) => {
return models.Post.findOne({id: createdPost.id, status: 'all'});
})
.then((createdPost) => {
should.exist(createdPost);
return createdPost.save({mobiledoc: markdownToMobiledoc('b')}, context);
})
.then((updatedPost) => {
updatedPost.get('mobiledoc').should.equal(markdownToMobiledoc('b'));
return models.MobiledocRevision
.findAll({
filter: `post_id:${updatedPost.id}`
});
})
.then((mobiledocRevisions) => {
should.equal(mobiledocRevisions.length, 2);
mobiledocRevisions.toJSON()[0].mobiledoc.should.equal(markdownToMobiledoc('b'));
mobiledocRevisions.toJSON()[1].mobiledoc.should.equal(markdownToMobiledoc('a'));
});
});
it('keeps only 10 last revisions in FIFO style', function () {
let revisionedPost;
const newPost = {
mobiledoc: markdownToMobiledoc('revision: 0')
};
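// The post starts at 'revision: 0' and is edited 11 times; only the 10 most
// recent revisions ('revision: 2' through 'revision: 11') should remain.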
return models.Post.add(newPost, context)
.then((createdPost) => {
return models.Post.findOne({id: createdPost.id, status: 'all'});
})
.then((createdPost) => {
should.exist(createdPost);
revisionedPost = createdPost;
return sequence(_.times(11, (i) => {
return () => {
return models.Post.edit({
mobiledoc: markdownToMobiledoc('revision: ' + (i + 1))
}, _.extend({}, context, {id: createdPost.id}));
};
}));
})
.then(() => models.MobiledocRevision
.findAll({
filter: `post_id:${revisionedPost.id}`
})
)
.then((mobiledocRevisions) => {
should.equal(mobiledocRevisions.length, 10);
mobiledocRevisions.toJSON()[0].mobiledoc.should.equal(markdownToMobiledoc('revision: 11'));
mobiledocRevisions.toJSON()[9].mobiledoc.should.equal(markdownToMobiledoc('revision: 2'));
});
});
it('creates 2 revisions after first edit for previously unversioned post', function () {
let unversionedPost;
const newPost = {
title: 'post title',
mobiledoc: markdownToMobiledoc('a')
};
// passing 'migrating' flag to simulate unversioned post
const options = Object.assign(_.clone(context), {migrating: true});
return models.Post.add(newPost, options)
.then((createdPost) => {
should.exist(createdPost);
unversionedPost = createdPost;
createdPost.get('mobiledoc').should.equal(markdownToMobiledoc('a'));
return models.MobiledocRevision
.findAll({
filter: `post_id:${createdPost.id}`
});
})
.then((mobiledocRevisions) => {
should.equal(mobiledocRevisions.length, 0);
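// no revisions were stored on add because the post was created with the
// migrating flag above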
return models.Post.edit({
mobiledoc: markdownToMobiledoc('b')
}, _.extend({}, context, {id: unversionedPost.id}));
})
.then((editedPost) => {
should.exist(editedPost);
editedPost.get('mobiledoc').should.equal(markdownToMobiledoc('b'));
return models.MobiledocRevision
.findAll({
filter: `post_id:${editedPost.id}`
});
})
.then((mobiledocRevisions) => {
should.equal(mobiledocRevisions.length, 2);
mobiledocRevisions.toJSON()[0].mobiledoc.should.equal(markdownToMobiledoc('b'));
mobiledocRevisions.toJSON()[1].mobiledoc.should.equal(markdownToMobiledoc('a'));
});
});
});
describe('Multiauthor Posts', function () {
before(testUtils.teardownDb);
after(function () {
return testUtils.teardownDb()
.then(function () {
return testUtils.setup('users:roles')();
});
});
before(testUtils.setup('posts:mu'));
it('can destroy multiple posts by author', function (done) {
// We're going to delete all posts by user 1
const authorData = {id: testUtils.DataGenerator.Content.users[0].id};
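// destroyByAuthor should remove only this author's posts; the other users'
// posts must remain untouched.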
models.Post.findAll({context: {internal: true}}).then(function (found) {
// There are 10 posts created by posts:mu fixture
found.length.should.equal(10);
return models.Post.destroyByAuthor(authorData);
}).then(function (results) {
// User 1 has 2 posts in the database (each user has proportionate amount)
// 2 = 10 / 5 (posts / users)
results.length.should.equal(2);
return models.Post.findAll({context: {internal: true}});
}).then(function (found) {
// Only 8 should remain
// 8 = 10 - 2
found.length.should.equal(8);
done();
}).catch(done);
});
});
describe('Post tag handling edge cases', function () {
let postJSON;
let tagJSON;
let editOptions;
const createTag = testUtils.DataGenerator.forKnex.createTag;
beforeEach(function () {
return testUtils.truncate('posts_tags')
.then(function () {
return testUtils.truncate('tags');
})
.then(function () {
return testUtils.truncate('posts');
})
.then(function () {
return testUtils.truncate('posts_meta');
});
});
beforeEach(function () {
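// Fixture: one published post with tags tag1-3 attached, plus three extra
// tags that are created but not attached to the post.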
tagJSON = [];
const post = _.cloneDeep(testUtils.DataGenerator.forModel.posts[0]);
const postTags = [
createTag({name: 'tag1', slug: 'tag1'}),
createTag({name: 'tag2', slug: 'tag2'}),
createTag({name: 'tag3', slug: 'tag3'})
];
const extraTags = [
createTag({name: 'existing tag a', slug: 'existing-tag-a'}),
createTag({name: 'existing-tag-b', slug: 'existing-tag-b'}),
createTag({name: 'existing_tag_c', slug: 'existing_tag_c'})
];
post.tags = postTags;
post.status = 'published';
return Promise.props({
post: models.Post.add(post, _.extend({}, context, {withRelated: ['tags']})),
tag1: models.Tag.add(extraTags[0], context),
tag2: models.Tag.add(extraTags[1], context),
tag3: models.Tag.add(extraTags[2], context)
}).then(function (result) {
postJSON = result.post.toJSON({withRelated: ['tags']});
tagJSON.push(result.tag1.toJSON());
tagJSON.push(result.tag2.toJSON());
tagJSON.push(result.tag3.toJSON());
editOptions = _.extend({}, context, {id: postJSON.id, withRelated: ['tags']});
// reset the eventSpy here
sinon.restore();
});
});
it('should create the test data correctly', function (done) {
// the beforeEach creates three extra tags
should.exist(tagJSON);
tagJSON.should.be.an.Array().with.lengthOf(3);
tagJSON[0].name.should.eql('existing tag a');
tagJSON[1].name.should.eql('existing-tag-b');
tagJSON[2].name.should.eql('existing_tag_c');
// creates a test post with an array of tags in the correct order
should.exist(postJSON);
postJSON.title.should.eql('HTML Ipsum');
should.exist(postJSON.tags);
postJSON.tags.should.be.an.Array().and.have.lengthOf(3);
postJSON.tags[0].name.should.eql('tag1');
postJSON.tags[1].name.should.eql('tag2');
postJSON.tags[2].name.should.eql('tag3');
done();
});
it('can edit slug of existing tag', function () {
const newJSON = _.cloneDeep(postJSON);
// Replace the post's tags with just the first existing tag, changing its slug
newJSON.tags = [{id: postJSON.tags[0].id, slug: 'eins'}];
// Edit the post
return models.Post.edit(newJSON, editOptions).then(function (updatedPost) {
updatedPost = updatedPost.toJSON({withRelated: ['tags']});
updatedPost.tags.should.have.lengthOf(1);
updatedPost.tags[0].name.should.eql(postJSON.tags[0].name);
updatedPost.tags[0].slug.should.eql('eins');
updatedPost.tags[0].id.should.eql(postJSON.tags[0].id);
});
});
it('can\'t edit dates and authors of existing tag', function () {
const newJSON = _.cloneDeep(postJSON);
let updatedAtFormat;
let createdAtFormat;
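// Per the test title, client-supplied changes to an existing tag's dates
// should not be persisted when editing the post.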
// Keep only the first existing tag and attempt to change its created_at/updated_at
newJSON.tags = [_.cloneDeep(postJSON.tags[0])];
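// Try to push the existing tag's dates two days into the future; per the test name, the model layer should not accept these changes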
newJSON.tags[0].created_at = moment().add(2, 'days').format('YYYY-MM-DD HH:mm:ss');
newJSON.tags[0].updated_at = moment().add(2, 'days').format('YYYY-MM-DD HH:mm:ss');
// NOTE: the `parent` property is currently only transformed/removed in the API layer, so mimic that here by mapping it to `parent_id`
newJSON.tags[0].parent_id = newJSON.tags[0].parent;
delete newJSON.tags[0].parent;
// Edit the post
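// The 1s delay presumably ensures that, if `updated_at` were (incorrectly) regenerated on save, it would differ at second resolution from the stored value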
return Promise.delay(1000)
.then(function () {
return models.Post.edit(newJSON, editOptions);
})
.then(function (updatedPost) {
updatedPost = updatedPost.toJSON({withRelated: ['tags']});
updatedPost.tags.should.have.lengthOf(1);
updatedPost.tags[0].should.have.properties({
name: postJSON.tags[0].name,
slug: postJSON.tags[0].slug,
id: postJSON.tags[0].id,
created_by: postJSON.tags[0].created_by,
updated_by: postJSON.tags[0].updated_by
});
updatedAtFormat = moment(updatedPost.tags[0].updated_at).format('YYYY-MM-DD HH:mm:ss');
updatedAtFormat.should.eql(moment(postJSON.tags[0].updated_at).format('YYYY-MM-DD HH:mm:ss'));
updatedAtFormat.should.not.eql(moment(newJSON.tags[0].updated_at).format('YYYY-MM-DD HH:mm:ss'));
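// created_at of the existing tag should likewise remain untouched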
createdAtFormat = moment(updatedPost.tags[0].created_at).format('YYYY-MM-DD HH:mm:ss');
createdAtFormat.should.eql(moment(postJSON.tags[0].created_at).format('YYYY-MM-DD HH:mm:ss'));
createdAtFormat.should.not.eql(moment(newJSON.tags[0].created_at).format('YYYY-MM-DD HH:mm:ss'));
});
});
it('can reorder existing, added and deleted tags', function () {
const newJSON = _.cloneDeep(postJSON);
const lastTag = [postJSON.tags[2]];
// remove tag in the middle (tag1, tag2, tag3 -> tag1, tag3)
newJSON.tags.splice(1, 1);
// add a new one as first tag and reorder existing (tag4, tag3, tag1)
newJSON.tags = [{name: 'tag4'}].concat([newJSON.tags[1]]).concat([newJSON.tags[0]]);
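// expected result: tag4 (new), tag3 (existing), tag1 (existing), in that order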
// Edit the post
return models.Post.edit(newJSON, editOptions).then(function (updatedPost) {
updatedPost = updatedPost.toJSON({withRelated: ['tags']});
updatedPost.tags.should.have.lengthOf(3);
updatedPost.tags[0].should.have.properties({
name: 'tag4'
});
updatedPost.tags[1].should.have.properties({
name: 'tag3',
id: postJSON.tags[2].id
});
updatedPost.tags[2].should.have.properties({
name: 'tag1',
id: postJSON.tags[0].id
});
});
});
it('can add multiple tags with conflicting slugs', function () {
const newJSON = _.cloneDeep(postJSON);
// Add tags whose names produce conflicting slugs
newJSON.tags = [];
newJSON.tags.push({name: 'C'});
newJSON.tags.push({name: 'C++'});
newJSON.tags.push({name: 'C#'});
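// 'C', 'C++' and 'C#' all slugify to the same base slug 'c'; the duplicates are expected to receive numeric suffixes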
// Edit the post
return models.Post.edit(newJSON, editOptions).then(function (updatedPost) {
updatedPost = updatedPost.toJSON({withRelated: ['tags']});
updatedPost.tags.should.have.lengthOf(3);
updatedPost.tags[0].should.have.properties({name: 'C', slug: 'c'});
updatedPost.tags[1].should.have.properties({name: 'C++', slug: 'c-2'});
updatedPost.tags[2].should.have.properties({name: 'C#', slug: 'c-3'});
});
});
it('can handle lowercase/uppercase tags', function () {
const newJSON = _.cloneDeep(postJSON);
// Add tags whose names differ only by case
newJSON.tags = [];
newJSON.tags.push({name: 'test'});
newJSON.tags.push({name: 'tEst'});
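// tag names are expected to match case-insensitively, so only a single tag should remain attached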
// Edit the post
return models.Post.edit(newJSON, editOptions).then(function (updatedPost) {
updatedPost = updatedPost.toJSON({withRelated: ['tags']});
updatedPost.tags.should.have.lengthOf(1);
});
});
});
// disabling sanitization until we can implement a better version
// it('should sanitize the title', function (done) {
// new models.Post().fetch().then(function (model) {
// return model.set({'title': "</title></head><body><script>alert('blogtitle');</script>"}).save();
// }).then(function (saved) {
// saved.get('title').should.eql("&lt;/title&gt;&lt;/head>&lt;body&gt;[removed]alert&#40;'blogtitle'&#41;;[removed]");
// done();
// }).catch(done);
// });
});