Added more webhooks & changed payload

refs #10461

- keep both payload formats for subscriber events so that existing webhooks do not break (see the example payload below)
- refactored the webhooks service to run models through the target API version's serializers
- added the new events described in the referenced issue
- this refactoring & enhancement is not yet documented; further breaking changes will happen because we are actively working on https://github.com/TryGhost/Ghost/issues/10438
kirrg001 2019-02-03 13:36:08 +01:00 committed by Katharina Irrgang
parent a83aa6e184
commit b83232bf0c
11 changed files with 229 additions and 225 deletions
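
For illustration only (field values are made up and trimmed to a few attributes): after this change most events deliver the model's full current state plus the previously stored values of whatever changed, while subscriber events additionally keep the legacy array shape so existing hooks keep working.

// e.g. post.published
{
    "post": {
        "current": {"id": "5c5e7b6f1234560001a1b2c3", "title": "Welcome to Ghost", "status": "published"},
        "previous": {"status": "draft", "updated_at": "2019-01-30T07:47:30.000Z"}
    }
}

// e.g. subscriber.added (new shape plus the legacy "subscribers" array)
{
    "subscriber": {
        "current": {"id": "5c5e7c0d1234560001d4e5f6", "name": "Joe Bloggs", "email": "joe@example.com"},
        "previous": {}
    },
    "subscribers": [
        {"id": "5c5e7c0d1234560001d4e5f6", "name": "Joe Bloggs", "email": "joe@example.com"}
    ]
}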

View File

@@ -1,3 +1,4 @@
 module.exports = require('./v0.1');
 module.exports['v0.1'] = require('./v0.1');
 module.exports.v2 = require('./v2');
+module.exports.shared = require('./shared');

View File

@@ -113,5 +113,9 @@ module.exports = {
     get actions() {
         return shared.pipeline(require('./actions'), localUtils);
+    },
+
+    get serializers() {
+        return require('./utils/serializers');
     }
 };

View File

@@ -599,9 +599,11 @@ ghostBookshelf.Model = ghostBookshelf.Model.extend({
         const attrs = {};
         const relations = {};

+        if (this._previousRelations) {
             _.each(Object.keys(this._previousRelations), (key) => {
                 relations[key] = this._previousRelations[key].toJSON();
             });
+        }

         Object.assign(attrs, this._previousAttributes, relations);
         return attrs;
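
A quick standalone illustration (not part of the diff) of why the new guard is needed: Object.keys throws on undefined, so a model whose previous relations were never recorded would crash here without it.

try {
    Object.keys(undefined);
} catch (err) {
    console.log(err.message); // "Cannot convert undefined or null to object"
}

console.log(Object.keys({tags: []})); // [ 'tags' ] — only this case reaches the _.each above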

View File

@@ -208,12 +208,16 @@ Post = ghostBookshelf.Model.extend({
         model.related('tags').once('detaching', function onDetached(collection, tag) {
             model.related('tags').once('detached', function onDetached(detachedCollection, response, options) {
                 tag.emitChange('detached', options);
+                model.emitChange('tag.detached', options);
             });
         });

         model.related('tags').once('attaching', function onDetached(collection, tags) {
             model.related('tags').once('attached', function onDetached(detachedCollection, response, options) {
-                tags.forEach(tag => tag.emitChange('attached', options));
+                tags.forEach((tag) => {
+                    tag.emitChange('attached', options);
+                    model.emitChange('tag.attached', options);
+                });
             });
         });
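
These model-level emits are what the new post.tag.attached / post.tag.detached (and page.*) webhook events are built on. A minimal sketch of consuming one of them inside the server — the require path is an assumption about where such code would live, and the handler signature matches what listen.js below registers:

const common = require('../../lib/common'); // assumed path relative to core/server

common.events.on('post.tag.detached', (model, options) => {
    // `model` is the post whose tag was just detached
    common.logging.info(`A tag was detached from post ${model.get('id')}`);
});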

View File

@@ -1,9 +1,5 @@
 module.exports = {
     get listen() {
         return require('./listen');
-    },
-
-    get trigger() {
-        return require('./trigger');
     }
 };

View File

@@ -1,60 +1,53 @@
 const _ = require('lodash');
 const common = require('../../lib/common');
-const webhooks = require('./index');
-
-let modelAttrs;
-
-// TODO: this can be removed once all events pass a .toJSON object through
-modelAttrs = {
-    subscriber: ['id', 'name', 'email']
-};
-
-// TODO: this works for basic models but we eventually want a full API response
-// with embedded models (?include=tags) and so on
-function generatePayload(event, model) {
-    const modelName = event.split('.')[0];
-    const pluralModelName = modelName + 's';
-    const action = event.split('.')[1];
-    const payload = {};
-    let data;
-
-    if (action === 'deleted') {
-        data = {};
-        modelAttrs[modelName].forEach((key) => {
-            if (model._previousAttributes[key] !== undefined) {
-                data[key] = model._previousAttributes[key];
-            }
-        });
-    } else {
-        data = model.toJSON();
-    }
-
-    payload[pluralModelName] = [data];
-
-    return payload;
-}
-
-function listener(event, model, options) {
-    let payload = {};
-
-    if (model) {
-        payload = generatePayload(event, model);
-    }
-
-    payload.event = event;
-
-    // avoid triggering webhooks when importing
-    if (options && options.importing) {
-        return;
-    }
-
-    webhooks.trigger(event, payload, options);
-}
-
-// TODO: use a wildcard with the new event emitter or use the webhooks API to
-// register listeners only for events that have webhooks
-function listen() {
-    common.events.on('subscriber.added', _.partial(listener, 'subscriber.added'));
-    common.events.on('subscriber.deleted', _.partial(listener, 'subscriber.deleted'));
-    common.events.on('site.changed', _.partial(listener, 'site.changed'));
-}
-
-// Public API
+const trigger = require('./trigger');
+
+const WEBHOOKS = [
+    'subscriber.added',
+    'subscriber.deleted',
+
+    'site.changed',
+
+    'post.added',
+    'post.deleted',
+    'post.edited',
+    'post.published',
+    'post.published.edited',
+    'post.unpublished',
+    'post.scheduled',
+    'post.unscheduled',
+    'post.rescheduled',
+
+    'page.added',
+    'page.deleted',
+    'page.edited',
+    'page.published',
+    'page.published.edited',
+    'page.unpublished',
+    'page.scheduled',
+    'page.unscheduled',
+    'page.rescheduled',
+
+    'tag.added',
+    'tag.edited',
+    'tag.deleted',
+
+    'post.tag.attached',
+    'post.tag.detached',
+    'page.tag.attached',
+    'page.tag.detached'
+];
+
+const listen = () => {
+    _.each(WEBHOOKS, (event) => {
+        common.events.on(event, (model, options) => {
+            // CASE: avoid triggering webhooks when importing
+            if (options && options.importing) {
+                return;
+            }
+
+            trigger(event, model);
+        });
+    });
+};
+
 module.exports = listen;

View File

@@ -0,0 +1,11 @@
const serialize = require('./serialize');

module.exports = (event, model) => {
    const payload = {};

    if (model) {
        Object.assign(payload, serialize(event, model));
    }

    return payload;
};

View File

@@ -0,0 +1,73 @@
module.exports = (event, model) => {
    const _ = require('lodash');
    const sequence = require('../../lib/promise/sequence');
    const api = require('../../api');

    const apiVersion = model.get('api_version') || 'v2';
    const docName = model.tableName;
    const ops = [];

    if (Object.keys(model.attributes).length) {
        let frame = {options: {previous: false, context: {user: true}}};

        ops.push(() => {
            return api.shared
                .serializers
                .handle
                .output(model, {docName: docName, method: 'read'}, api[apiVersion].serializers.output, frame)
                .then(() => {
                    return frame.response[docName][0];
                });
        });
    } else {
        ops.push(() => {
            return Promise.resolve({});
        });
    }

    if (Object.keys(model._previousAttributes).length) {
        ops.push(() => {
            const frame = {options: {previous: true, context: {user: true}}};

            return api.shared
                .serializers
                .handle
                .output(model, {docName: docName, method: 'read'}, api[apiVersion].serializers.output, frame)
                .then(() => {
                    return frame.response[docName][0];
                });
        });
    } else {
        ops.push(() => {
            return Promise.resolve({});
        });
    }

    sequence(ops)
        .then((results) => {
            const current = results[0];
            const previous = results[1];
            const changed = model._changed ? Object.keys(model._changed) : {};

            const payload = {
                [docName.replace(/s$/, '')]: {
                    current: current,
                    previous: _.pick(previous, changed)
                }
            };

            // @TODO: remove in v3
            // @NOTE: Our webhook format has changed, we still have to support the old format for subscribers events
            if ('subscriber.added' === event) {
                payload[docName] = [current];
            }

            if ('subscriber.deleted' === event) {
                payload[docName] = [previous];
            }

            return payload;
        });
};
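
A standalone illustration (values made up) of what the `previous: _.pick(previous, changed)` line above produces — only attributes that actually changed are echoed back:

const _ = require('lodash');

const previous = {title: 'Old title', status: 'draft', slug: 'welcome'};
const changed = ['status'];

console.log(_.pick(previous, changed)); // { status: 'draft' }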

View File

@@ -1,25 +1,76 @@
 const _ = require('lodash');
+const debug = require('ghost-ignition').debug('services:webhooks:trigger');
 const common = require('../../lib/common');
-const models = require('../../models');
-const pipeline = require('../../../server/lib/promise/pipeline');
 const request = require('../../../server/lib/request');
+const models = require('../../models');
+const payload = require('./payload');

-function updateWebhookTriggerData(id, data) {
-    models.Webhook.edit(data, {id: id}).catch(() => {
-        common.logging.warn(`Unable to update last_triggered for webhook: ${id}`);
-    });
-}
+const webhooks = {
+    getAll(event) {
+        return models
+            .Webhook
+            .findAllByEvent(event, {context: {internal: true}});
+    },
+
+    update(webhook, data) {
+        models
+            .Webhook
+            .edit({
+                last_triggered_at: Date.now(),
+                last_triggered_status: data.statusCode,
+                last_triggered_error: data.error || null
+            }, {id: webhook.id})
+            .catch(() => {
+                common.logging.warn(`Unable to update "last_triggered" for webhook: ${webhook.id}`);
+            });
+    },
+
+    destroy(webhook) {
+        return models
+            .Webhook
+            .destroy({id: webhook.id}, {context: {internal: true}})
+            .catch(() => {
+                common.logging.warn(`Unable to destroy webhook ${webhook.id}.`);
+            });
+    }
+};

-function makeRequests(webhooksCollection, payload, options) {
-    _.each(webhooksCollection.models, (webhook) => {
-        const event = webhook.get('event');
-        const targetUrl = webhook.get('target_url');
-        const webhookId = webhook.get('id');
-        const reqPayload = JSON.stringify(payload);
-
-        common.logging.info('webhook.trigger', event, targetUrl);
-        const triggeredAt = Date.now();
-        request(targetUrl, {
+const response = {
+    onSuccess(webhook) {
+        return (res) => {
+            webhooks.update(webhook, {
+                statusCode: res.statusCode
+            });
+        };
+    },
+
+    onError(webhook) {
+        return (err) => {
+            if (err.statusCode === 410) {
+                common.logging.info(`Webhook destroyed (410 response) for "${webhook.get('event')}" with url "${webhook.get('target_url')}".`);
+
+                return webhooks.destroy(webhook);
+            }
+
+            webhooks.update(webhook, {
+                statusCode: err.statusCode,
+                error: `Request failed: ${err.code || 'unknown'}`
+            });
+
+            common.logging.warn(`Request to ${webhook.get('target_url') || null} failed because of: ${err.code || ''}.`);
+        };
+    }
+};
+
+module.exports = (event, model) => {
+    webhooks.getAll(event)
+        .then((webhooks) => {
+            debug(`${webhooks.models.length} webhooks found.`);
+
+            _.each(webhooks.models, (webhook) => {
+                const reqPayload = JSON.stringify(payload(webhook.get('event'), model));
+                const url = webhook.get('target_url');
+                const opts = {
                     body: reqPayload,
                     headers: {
                         'Content-Length': Buffer.byteLength(reqPayload),
@@ -27,43 +78,13 @@ function makeRequests(webhooksCollection, payload, options) {
                     },
                     timeout: 2 * 1000,
                     retries: 5
-        }).then((res) => {
-            updateWebhookTriggerData(webhookId, {
-                last_triggered_at: triggeredAt,
-                last_triggered_status: res.statusCode
-            });
-        }).catch((err) => {
-            // when a webhook responds with a 410 Gone response we should remove the hook
-            if (err.statusCode === 410) {
-                common.logging.info('webhook.destroy (410 response)', event, targetUrl);
-                return models.Webhook.destroy({id: webhookId}, options).catch(() => {
-                    common.logging.warn(`Unable to destroy webhook ${webhookId}`);
-                });
-            }
-
-            let lastTriggeredError = err.statusCode ? '' : `Request failed: ${err.code || ''}`;
-
-            updateWebhookTriggerData(webhookId, {
-                last_triggered_at: triggeredAt,
-                last_triggered_status: err.statusCode,
-                last_triggered_error: lastTriggeredError
-            });
-
-            common.logging.warn(`Request to ${targetUrl} failed ${err.code || ''}.`);
-        });
-    });
-}
-
-function trigger(event, payload, options) {
-    let tasks;
-
-    function doQuery(options) {
-        return models.Webhook.findAllByEvent(event, options);
-    }
-
-    tasks = [
-        doQuery,
-        _.partialRight(makeRequests, payload, options)
-    ];
-
-    return pipeline(tasks, options);
-}
-
-module.exports = trigger;
+                };
+
+                common.logging.info(`Trigger Webhook for "${webhook.get('event')}" with url "${url}".`);
+
+                request(url, opts)
+                    .then(response.onSuccess(webhook))
+                    .catch(response.onError(webhook));
+            });
+        });
+};
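
The sender above times out after 2 seconds, retries 5 times, and permanently deletes a hook whose endpoint answers 410 Gone. A minimal receiver sketch — Express is only used here for brevity and is not part of this commit:

const express = require('express');
const app = express();

app.use(express.json());

app.post('/hooks/ghost', (req, res) => {
    // req.body is the JSON payload produced by payload()/serialize() above
    console.log('received webhook payload with keys:', Object.keys(req.body));

    // Respond quickly: Ghost aborts the request after 2s and retries up to 5 times.
    res.sendStatus(200);

    // Answering 410 instead would make Ghost delete this webhook:
    // res.sendStatus(410);
});

app.listen(3001);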

View File

@@ -1454,11 +1454,12 @@ describe('Post Model', function () {
     should.equal(deleted.author, undefined);

-    Object.keys(eventsTriggered).length.should.eql(4);
+    Object.keys(eventsTriggered).length.should.eql(5);
     should.exist(eventsTriggered['post.unpublished']);
     should.exist(eventsTriggered['post.deleted']);
     should.exist(eventsTriggered['user.detached']);
     should.exist(eventsTriggered['tag.detached']);
+    should.exist(eventsTriggered['post.tag.detached']);

     // Double check we can't find the post again
     return models.Post.findOne(firstItemData);
@@ -1494,9 +1495,10 @@ describe('Post Model', function () {
     should.equal(deleted.author, undefined);

-    Object.keys(eventsTriggered).length.should.eql(3);
+    Object.keys(eventsTriggered).length.should.eql(4);
     should.exist(eventsTriggered['post.deleted']);
     should.exist(eventsTriggered['tag.detached']);
+    should.exist(eventsTriggered['post.tag.detached']);
     should.exist(eventsTriggered['user.detached']);

     // Double check we can't find the post again

View File

@@ -1,103 +0,0 @@
const _ = require('lodash');
const should = require('should');
const sinon = require('sinon');
const rewire = require('rewire');
const testUtils = require('../../utils');
const common = require('../../../server/lib/common');

// Stuff we test
const webhooks = {
    listen: rewire('../../../server/services/webhooks/listen'),
    trigger: rewire('../../../server/services/webhooks/trigger')
};

describe('Webhooks', function () {
    var eventStub;

    beforeEach(function () {
        eventStub = sinon.stub(common.events, 'on');
    });

    afterEach(function () {
        sinon.restore();
    });

    it('listen() should initialise events correctly', function () {
        webhooks.listen();
        eventStub.calledThrice.should.be.true();
    });

    it('listener() with "subscriber.added" event calls webhooks.trigger with toJSONified model', function () {
        var testSubscriber = _.clone(testUtils.DataGenerator.Content.subscribers[0]),
            testModel = {
                toJSON: function () {
                    return testSubscriber;
                }
            },
            webhooksStub = {
                trigger: sinon.stub()
            },
            resetWebhooks = webhooks.listen.__set__('webhooks', webhooksStub),
            listener = webhooks.listen.__get__('listener'),
            triggerArgs;

        listener('subscriber.added', testModel);

        webhooksStub.trigger.calledOnce.should.be.true();

        triggerArgs = webhooksStub.trigger.getCall(0).args;
        triggerArgs[0].should.eql('subscriber.added');
        triggerArgs[1].should.deepEqual({
            subscribers: [testSubscriber],
            event: 'subscriber.added'
        });

        resetWebhooks();
    });

    it('listener() with "subscriber.deleted" event calls webhooks.trigger with _previousAttributes values', function () {
        var testSubscriber = _.clone(testUtils.DataGenerator.Content.subscribers[1]),
            testModel = {
                _previousAttributes: testSubscriber
            },
            webhooksStub = {
                trigger: sinon.stub()
            },
            resetWebhooks = webhooks.listen.__set__('webhooks', webhooksStub),
            listener = webhooks.listen.__get__('listener'),
            triggerArgs;

        listener('subscriber.deleted', testModel);

        webhooksStub.trigger.calledOnce.should.be.true();

        triggerArgs = webhooksStub.trigger.getCall(0).args;
        triggerArgs[0].should.eql('subscriber.deleted');
        triggerArgs[1].should.deepEqual({
            subscribers: [testSubscriber],
            event: 'subscriber.deleted'
        });

        resetWebhooks();
    });

    it('listener() with "site.changed" event calls webhooks.trigger', function () {
        const webhooksStub = {
            trigger: sinon.stub()
        };
        const resetWebhooks = webhooks.listen.__set__('webhooks', webhooksStub);
        const listener = webhooks.listen.__get__('listener');
        let triggerArgs;

        listener('site.changed');

        webhooksStub.trigger.calledOnce.should.be.true();

        triggerArgs = webhooksStub.trigger.getCall(0).args;
        triggerArgs[0].should.eql('site.changed');
        triggerArgs[1].should.eql({
            event: 'site.changed'
        });

        resetWebhooks();
    });
});
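
The spec deleted above only covered the three original events and the removed listener()/generatePayload() internals. A sketch of what a replacement spec for the rewritten listen() might look like — same mocha/sinon/should setup and require paths as the deleted file; the count of 28 comes from the WEBHOOKS array added in this commit:

const should = require('should');
const sinon = require('sinon');
const common = require('../../../server/lib/common');
const listen = require('../../../server/services/webhooks/listen');

describe('Webhooks: listen', function () {
    afterEach(function () {
        sinon.restore();
    });

    it('registers one listener per webhook event', function () {
        const eventStub = sinon.stub(common.events, 'on');

        listen();

        // 28 events are listed in the WEBHOOKS array at the time of this commit
        eventStub.callCount.should.eql(28);
        eventStub.calledWith('post.tag.attached').should.be.true();
        eventStub.calledWith('site.changed').should.be.true();
    });
});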