Added more webhooks & changed payload

refs #10461

- do not break existing webhooks: keep both payload formats for subscriber events (an illustrative payload is sketched below)
- refactored the webhooks service to run models through the serializers of the target API version
- added the new events described in the referenced issue
- this refactoring & enhancement is undocumented; further breaking changes are expected because we are actively working on https://github.com/TryGhost/Ghost/issues/10438
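
For illustration, a rough sketch of delivery bodies under the reworked format. The exact attribute set comes from the target API version's serializers, so every field name and value below is an invented example:

    // Hypothetical "post.edited" delivery: the resource is keyed by its singular name,
    // with the full current state and only the changed attributes under "previous".
    const postEditedBody = {
        post: {
            current: {id: '5c5e8b6a1f0c3a0001d3e9f1', title: 'Welcome to Ghost (edited)', status: 'published'},
            previous: {title: 'Welcome to Ghost'}
        }
    };

    // Hypothetical "subscriber.added" delivery: the new format plus the legacy
    // "subscribers" array, so existing subscriber integrations keep working for now.
    const subscriberAddedBody = {
        subscriber: {
            current: {id: '5c5e8b6a1f0c3a0001d3e9f2', name: 'Jane Doe', email: 'jane@example.com'},
            previous: {}
        },
        subscribers: [
            {id: '5c5e8b6a1f0c3a0001d3e9f2', name: 'Jane Doe', email: 'jane@example.com'}
        ]
    };
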
kirrg001 2019-02-03 13:36:08 +01:00 committed by Katharina Irrgang
parent a83aa6e184
commit b83232bf0c
11 changed files with 229 additions and 225 deletions

View File

@@ -1,3 +1,4 @@
 module.exports = require('./v0.1');
 module.exports['v0.1'] = require('./v0.1');
 module.exports.v2 = require('./v2');
+module.exports.shared = require('./shared');

View File

@@ -113,5 +113,9 @@ module.exports = {

     get actions() {
         return shared.pipeline(require('./actions'), localUtils);
+    },
+
+    get serializers() {
+        return require('./utils/serializers');
     }
 };

View File

@@ -599,9 +599,11 @@ ghostBookshelf.Model = ghostBookshelf.Model.extend({
         const attrs = {};
         const relations = {};

-        _.each(Object.keys(this._previousRelations), (key) => {
-            relations[key] = this._previousRelations[key].toJSON();
-        });
+        if (this._previousRelations) {
+            _.each(Object.keys(this._previousRelations), (key) => {
+                relations[key] = this._previousRelations[key].toJSON();
+            });
+        }

         Object.assign(attrs, this._previousAttributes, relations);
         return attrs;

View File

@@ -208,12 +208,16 @@ Post = ghostBookshelf.Model.extend({

         model.related('tags').once('detaching', function onDetached(collection, tag) {
             model.related('tags').once('detached', function onDetached(detachedCollection, response, options) {
                 tag.emitChange('detached', options);
+                model.emitChange('tag.detached', options);
             });
         });

         model.related('tags').once('attaching', function onDetached(collection, tags) {
             model.related('tags').once('attached', function onDetached(detachedCollection, response, options) {
-                tags.forEach(tag => tag.emitChange('attached', options));
+                tags.forEach((tag) => {
+                    tag.emitChange('attached', options);
+                    model.emitChange('tag.attached', options);
+                });
             });
         });

View File

@@ -1,9 +1,5 @@
 module.exports = {
     get listen() {
         return require('./listen');
-    },
-
-    get trigger() {
-        return require('./trigger');
     }
 };

View File

@@ -1,60 +1,53 @@
 const _ = require('lodash');
 const common = require('../../lib/common');
-const webhooks = require('./index');
-
-let modelAttrs;
-
-// TODO: this can be removed once all events pass a .toJSON object through
-modelAttrs = {
-    subscriber: ['id', 'name', 'email']
-};
-
-// TODO: this works for basic models but we eventually want a full API response
-// with embedded models (?include=tags) and so on
-function generatePayload(event, model) {
-    const modelName = event.split('.')[0];
-    const pluralModelName = modelName + 's';
-    const action = event.split('.')[1];
-    const payload = {};
-    let data;
-
-    if (action === 'deleted') {
-        data = {};
-        modelAttrs[modelName].forEach((key) => {
-            if (model._previousAttributes[key] !== undefined) {
-                data[key] = model._previousAttributes[key];
-            }
-        });
-    } else {
-        data = model.toJSON();
-    }
-
-    payload[pluralModelName] = [data];
-
-    return payload;
-}
-
-function listener(event, model, options) {
-    let payload = {};
-
-    if (model) {
-        payload = generatePayload(event, model);
-    }
-
-    payload.event = event;
-
-    // avoid triggering webhooks when importing
-    if (options && options.importing) {
-        return;
-    }
-
-    webhooks.trigger(event, payload, options);
-}
-
-// TODO: use a wildcard with the new event emitter or use the webhooks API to
-// register listeners only for events that have webhooks
-function listen() {
-    common.events.on('subscriber.added', _.partial(listener, 'subscriber.added'));
-    common.events.on('subscriber.deleted', _.partial(listener, 'subscriber.deleted'));
-    common.events.on('site.changed', _.partial(listener, 'site.changed'));
-}
-
-// Public API
+const trigger = require('./trigger');
+
+const WEBHOOKS = [
+    'subscriber.added',
+    'subscriber.deleted',
+
+    'site.changed',
+
+    'post.added',
+    'post.deleted',
+    'post.edited',
+    'post.published',
+    'post.published.edited',
+    'post.unpublished',
+    'post.scheduled',
+    'post.unscheduled',
+    'post.rescheduled',
+
+    'page.added',
+    'page.deleted',
+    'page.edited',
+    'page.published',
+    'page.published.edited',
+    'page.unpublished',
+    'page.scheduled',
+    'page.unscheduled',
+    'page.rescheduled',
+
+    'tag.added',
+    'tag.edited',
+    'tag.deleted',
+
+    'post.tag.attached',
+    'post.tag.detached',
+    'page.tag.attached',
+    'page.tag.detached'
+];
+
+const listen = () => {
+    _.each(WEBHOOKS, (event) => {
+        common.events.on(event, (model, options) => {
+            // CASE: avoid triggering webhooks when importing
+            if (options && options.importing) {
+                return;
+            }
+
+            trigger(event, model);
+        });
+    });
+};

 module.exports = listen;
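
To make the registration pattern above concrete, here is a minimal, self-contained sketch with a plain Node EventEmitter standing in for common.events and a console stub standing in for trigger(). Both stand-ins are assumptions of this sketch, not Ghost modules:

    const EventEmitter = require('events');

    // Stand-ins (assumptions for this sketch only).
    const events = new EventEmitter();
    const trigger = (event, model) => console.log(`would POST webhooks for "${event}"`, model.id);

    const WEBHOOKS = ['post.published', 'post.tag.attached'];

    const listen = () => {
        WEBHOOKS.forEach((event) => {
            events.on(event, (model, options) => {
                // CASE: avoid triggering webhooks when importing
                if (options && options.importing) {
                    return;
                }

                trigger(event, model);
            });
        });
    };

    listen();
    events.emit('post.published', {id: '1'});                    // logs: would POST webhooks for "post.published" 1
    events.emit('post.published', {id: '1'}, {importing: true}); // skipped by the importing guard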

View File

@@ -0,0 +1,11 @@
+const serialize = require('./serialize');
+
+module.exports = (event, model) => {
+    const payload = {};
+
+    if (model) {
+        Object.assign(payload, serialize(event, model));
+    }
+
+    return payload;
+};

View File

@@ -0,0 +1,73 @@
+module.exports = (event, model) => {
+    const _ = require('lodash');
+    const sequence = require('../../lib/promise/sequence');
+    const api = require('../../api');
+
+    const apiVersion = model.get('api_version') || 'v2';
+    const docName = model.tableName;
+    const ops = [];
+
+    if (Object.keys(model.attributes).length) {
+        let frame = {options: {previous: false, context: {user: true}}};
+
+        ops.push(() => {
+            return api.shared
+                .serializers
+                .handle
+                .output(model, {docName: docName, method: 'read'}, api[apiVersion].serializers.output, frame)
+                .then(() => {
+                    return frame.response[docName][0];
+                });
+        });
+    } else {
+        ops.push(() => {
+            return Promise.resolve({});
+        });
+    }
+
+    if (Object.keys(model._previousAttributes).length) {
+        ops.push(() => {
+            const frame = {options: {previous: true, context: {user: true}}};
+
+            return api.shared
+                .serializers
+                .handle
+                .output(model, {docName: docName, method: 'read'}, api[apiVersion].serializers.output, frame)
+                .then(() => {
+                    return frame.response[docName][0];
+                });
+        });
+    } else {
+        ops.push(() => {
+            return Promise.resolve({});
+        });
+    }
+
+    sequence(ops)
+        .then((results) => {
+            const current = results[0];
+            const previous = results[1];
+            const changed = model._changed ? Object.keys(model._changed) : {};
+
+            const payload = {
+                [docName.replace(/s$/, '')]: {
+                    current: current,
+                    previous: _.pick(previous, changed)
+                }
+            };
+
+            // @TODO: remove in v3
+            // @NOTE: Our webhook format has changed, we still have to support the old format for subscribers events
+            if ('subscriber.added' === event) {
+                payload[docName] = [current];
+            }
+
+            if ('subscriber.deleted' === event) {
+                payload[docName] = [previous];
+            }
+
+            return payload;
+        });
+};

View File

@@ -1,69 +1,90 @@
 const _ = require('lodash');
+const debug = require('ghost-ignition').debug('services:webhooks:trigger');
 const common = require('../../lib/common');
-const models = require('../../models');
-const pipeline = require('../../../server/lib/promise/pipeline');
 const request = require('../../../server/lib/request');
+const models = require('../../models');
+const payload = require('./payload');

-function updateWebhookTriggerData(id, data) {
-    models.Webhook.edit(data, {id: id}).catch(() => {
-        common.logging.warn(`Unable to update last_triggered for webhook: ${id}`);
-    });
-}
-
-function makeRequests(webhooksCollection, payload, options) {
-    _.each(webhooksCollection.models, (webhook) => {
-        const event = webhook.get('event');
-        const targetUrl = webhook.get('target_url');
-        const webhookId = webhook.get('id');
-        const reqPayload = JSON.stringify(payload);
-
-        common.logging.info('webhook.trigger', event, targetUrl);
-
-        const triggeredAt = Date.now();
-        request(targetUrl, {
-            body: reqPayload,
-            headers: {
-                'Content-Length': Buffer.byteLength(reqPayload),
-                'Content-Type': 'application/json'
-            },
-            timeout: 2 * 1000,
-            retries: 5
-        }).then((res) => {
-            updateWebhookTriggerData(webhookId, {
-                last_triggered_at: triggeredAt,
-                last_triggered_status: res.statusCode
-            });
-        }).catch((err) => {
-            // when a webhook responds with a 410 Gone response we should remove the hook
-            if (err.statusCode === 410) {
-                common.logging.info('webhook.destroy (410 response)', event, targetUrl);
-                return models.Webhook.destroy({id: webhookId}, options).catch(() => {
-                    common.logging.warn(`Unable to destroy webhook ${webhookId}`);
-                });
-            }
-
-            let lastTriggeredError = err.statusCode ? '' : `Request failed: ${err.code || ''}`;
-
-            updateWebhookTriggerData(webhookId, {
-                last_triggered_at: triggeredAt,
-                last_triggered_status: err.statusCode,
-                last_triggered_error: lastTriggeredError
-            });
-
-            common.logging.warn(`Request to ${targetUrl} failed ${err.code || ''}.`);
-        });
-    });
-}
-
-function trigger(event, payload, options) {
-    let tasks;
-
-    function doQuery(options) {
-        return models.Webhook.findAllByEvent(event, options);
-    }
-
-    tasks = [
-        doQuery,
-        _.partialRight(makeRequests, payload, options)
-    ];
-
-    return pipeline(tasks, options);
-}
-
-module.exports = trigger;
+const webhooks = {
+    getAll(event) {
+        return models
+            .Webhook
+            .findAllByEvent(event, {context: {internal: true}});
+    },
+
+    update(webhook, data) {
+        models
+            .Webhook
+            .edit({
+                last_triggered_at: Date.now(),
+                last_triggered_status: data.statusCode,
+                last_triggered_error: data.error || null
+            }, {id: webhook.id})
+            .catch(() => {
+                common.logging.warn(`Unable to update "last_triggered" for webhook: ${webhook.id}`);
+            });
+    },
+
+    destroy(webhook) {
+        return models
+            .Webhook
+            .destroy({id: webhook.id}, {context: {internal: true}})
+            .catch(() => {
+                common.logging.warn(`Unable to destroy webhook ${webhook.id}.`);
+            });
+    }
+};
+
+const response = {
+    onSuccess(webhook) {
+        return (res) => {
+            webhooks.update(webhook, {
+                statusCode: res.statusCode
+            });
+        };
+    },
+
+    onError(webhook) {
+        return (err) => {
+            if (err.statusCode === 410) {
+                common.logging.info(`Webhook destroyed (410 response) for "${webhook.get('event')}" with url "${webhook.get('target_url')}".`);
+                return webhooks.destroy(webhook);
+            }
+
+            webhooks.update(webhook, {
+                statusCode: err.statusCode,
+                error: `Request failed: ${err.code || 'unknown'}`
+            });
+
+            common.logging.warn(`Request to ${webhook.get('target_url') || null} failed because of: ${err.code || ''}.`);
+        };
+    }
+};
+
+module.exports = (event, model) => {
+    webhooks.getAll(event)
+        .then((webhooks) => {
+            debug(`${webhooks.models.length} webhooks found.`);
+
+            _.each(webhooks.models, (webhook) => {
+                const reqPayload = JSON.stringify(payload(webhook.get('event'), model));
+                const url = webhook.get('target_url');
+                const opts = {
+                    body: reqPayload,
+                    headers: {
+                        'Content-Length': Buffer.byteLength(reqPayload),
+                        'Content-Type': 'application/json'
+                    },
+                    timeout: 2 * 1000,
+                    retries: 5
+                };
+
+                common.logging.info(`Trigger Webhook for "${webhook.get('event')}" with url "${url}".`);
+
+                request(url, opts)
+                    .then(response.onSuccess(webhook))
+                    .catch(response.onError(webhook));
+            });
+        });
+};
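
The delivery details are visible above: the body is JSON, the request times out after 2 seconds with up to 5 retries, a successful response is recorded in last_triggered_status, and a 410 response removes the webhook. A minimal receiving endpoint might look like the sketch below; Express is used purely as an example and is not part of this commit:

    const express = require('express'); // assumption: any HTTP stack works as a receiver

    const app = express();
    app.use(express.json());

    app.post('/hooks/ghost', (req, res) => {
        // e.g. {post: {current: {...}, previous: {...}}} for post.* events
        console.log('webhook delivery:', JSON.stringify(req.body));

        // Respond quickly: Ghost aborts after 2s and retries failed deliveries.
        res.sendStatus(200);

        // Replying with 410 Gone instead would make Ghost delete this webhook.
    });

    app.listen(3010);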

View File

@@ -1454,11 +1454,12 @@ describe('Post Model', function () {

                should.equal(deleted.author, undefined);

-                Object.keys(eventsTriggered).length.should.eql(4);
+                Object.keys(eventsTriggered).length.should.eql(5);
                 should.exist(eventsTriggered['post.unpublished']);
                 should.exist(eventsTriggered['post.deleted']);
                 should.exist(eventsTriggered['user.detached']);
                 should.exist(eventsTriggered['tag.detached']);
+                should.exist(eventsTriggered['post.tag.detached']);

                 // Double check we can't find the post again
                 return models.Post.findOne(firstItemData);
@@ -1494,9 +1495,10 @@ describe('Post Model', function () {

                should.equal(deleted.author, undefined);

-                Object.keys(eventsTriggered).length.should.eql(3);
+                Object.keys(eventsTriggered).length.should.eql(4);
                 should.exist(eventsTriggered['post.deleted']);
                 should.exist(eventsTriggered['tag.detached']);
+                should.exist(eventsTriggered['post.tag.detached']);
                 should.exist(eventsTriggered['user.detached']);

                 // Double check we can't find the post again

View File

@@ -1,103 +0,0 @@
-const _ = require('lodash');
-const should = require('should');
-const sinon = require('sinon');
-const rewire = require('rewire');
-const testUtils = require('../../utils');
-const common = require('../../../server/lib/common');
-
-// Stuff we test
-const webhooks = {
-    listen: rewire('../../../server/services/webhooks/listen'),
-    trigger: rewire('../../../server/services/webhooks/trigger')
-};
-
-describe('Webhooks', function () {
-    var eventStub;
-
-    beforeEach(function () {
-        eventStub = sinon.stub(common.events, 'on');
-    });
-
-    afterEach(function () {
-        sinon.restore();
-    });
-
-    it('listen() should initialise events correctly', function () {
-        webhooks.listen();
-        eventStub.calledThrice.should.be.true();
-    });
-
-    it('listener() with "subscriber.added" event calls webhooks.trigger with toJSONified model', function () {
-        var testSubscriber = _.clone(testUtils.DataGenerator.Content.subscribers[0]),
-            testModel = {
-                toJSON: function () {
-                    return testSubscriber;
-                }
-            },
-            webhooksStub = {
-                trigger: sinon.stub()
-            },
-            resetWebhooks = webhooks.listen.__set__('webhooks', webhooksStub),
-            listener = webhooks.listen.__get__('listener'),
-            triggerArgs;
-
-        listener('subscriber.added', testModel);
-
-        webhooksStub.trigger.calledOnce.should.be.true();
-
-        triggerArgs = webhooksStub.trigger.getCall(0).args;
-        triggerArgs[0].should.eql('subscriber.added');
-        triggerArgs[1].should.deepEqual({
-            subscribers: [testSubscriber],
-            event: 'subscriber.added'
-        });
-
-        resetWebhooks();
-    });
-
-    it('listener() with "subscriber.deleted" event calls webhooks.trigger with _previousAttributes values', function () {
-        var testSubscriber = _.clone(testUtils.DataGenerator.Content.subscribers[1]),
-            testModel = {
-                _previousAttributes: testSubscriber
-            },
-            webhooksStub = {
-                trigger: sinon.stub()
-            },
-            resetWebhooks = webhooks.listen.__set__('webhooks', webhooksStub),
-            listener = webhooks.listen.__get__('listener'),
-            triggerArgs;
-
-        listener('subscriber.deleted', testModel);
-
-        webhooksStub.trigger.calledOnce.should.be.true();
-
-        triggerArgs = webhooksStub.trigger.getCall(0).args;
-        triggerArgs[0].should.eql('subscriber.deleted');
-        triggerArgs[1].should.deepEqual({
-            subscribers: [testSubscriber],
-            event: 'subscriber.deleted'
-        });
-
-        resetWebhooks();
-    });
-
-    it('listener() with "site.changed" event calls webhooks.trigger ', function () {
-        const webhooksStub = {
-            trigger: sinon.stub()
-        };
-        const resetWebhooks = webhooks.listen.__set__('webhooks', webhooksStub);
-        const listener = webhooks.listen.__get__('listener');
-        let triggerArgs;
-
-        listener('site.changed');
-
-        webhooksStub.trigger.calledOnce.should.be.true();
-
-        triggerArgs = webhooksStub.trigger.getCall(0).args;
-        triggerArgs[0].should.eql('site.changed');
-        triggerArgs[1].should.eql({
-            event: 'site.changed'
-        });
-
-        resetWebhooks();
-    });
-});