Mirror of https://github.com/TryGhost/Ghost.git (synced 2024-12-22 10:21:36 +03:00)

Commit b392d1925a
refs #9601

### Dynamic Routing

This is the beta version of dynamic routing.

- we had an initial implementation of "channels" available in the codebase
- we have removed this implementation and moved it
- there is now a centralised place for dynamic routing: server/services/routing
- each routing component is represented by a router type e.g. collections, routes, static pages, taxonomies, rss, preview of posts
- keep as much of the logic of routing helpers, middlewares and controllers as possible
- ensure test coverage
- connect all the things together
  - yaml file + validation
  - routing + routers
  - url service
  - sitemaps
  - url access
- deeper implementation of yaml validations
  - e.g. hard-require slashes
- ensure routing hierarchy/order
  - e.g. you enable the subscriber app and have a custom static page that lives under the same slug /subscribe: static pages are stronger than apps
  - e.g. the first collection owns the post it has filtered: a post cannot live in two collections
- ensure apps are still working and hook into the routers layer (or better said: register in the routing service)
- add as many comments as possible to the code base for better understanding
- ensure a clean debug log
- ensure we can unmount routes
  - e.g. you have a collection permalink of /:slug/ represented by {globals.permalink} and you change the permalink in the admin to a dated permalink
  - the express route gets refreshed from /:slug/ to /:year/:month/:day/:slug/
  - unmount without a server restart, yay
- ensure we are backwards compatible
  - e.g. render home.hbs for the collection index if the collection route is /
  - ensure you can access your configured permalink from the settings table with {globals.permalink}

### Render 503 if url service did not finish

- return 503 if the url service has not finished generating the resource urls (see the middleware sketch after these notes)

### Rewrite sitemaps

- we have rewritten the sitemaps "service", because url generation no longer happens at runtime: we generate all urls on bootstrap
- the sitemaps service consumes the created resource and router urls, and these urls are shown on the xml pages
- we listen on url events
- we listen on router events
- we no longer have to fetch the resources, which is nice: the url service pre-fetches resources and emits their urls
- the url service is the only component that knows which urls are valid
- made some ES6 adaptations
- we keep the caching logic -> only regenerate the xml if there is a change
- updated tests
- checked test coverage (100%)

### Re-work usage of Url utility

- replace all usages of `urlService.utils.urlFor` with `urlService.getByResourceId` (see the sketch after these notes)
  - only for resources e.g. post, author, tag
  - this is important, because with dynamic routing we no longer create static urls based on the settings permalink at runtime
- adapt the url utility
- adapt tests
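Two short sketches for the points marked above. Both are illustrative snippets written for this summary, not code from the commit; apart from `urlService.utils.urlFor` and `urlService.getByResourceId`, the names used (e.g. `hasFinished()`, `MaintenanceError`, the require paths and argument shapes) are assumptions about the surrounding APIs.

A possible shape for the 503 behaviour, as an Express middleware that rejects frontend requests until the url service has generated all resource urls:

```js
// Hypothetical middleware sketch: `hasFinished()` and `MaintenanceError`
// (which maps to a 503) are assumed names, and the require paths are illustrative.
const urlService = require('../../services/url');
const common = require('../../lib/common');

module.exports = function ensureUrlServiceIsReady(req, res, next) {
    if (urlService.hasFinished()) {
        return next();
    }

    // Url generation is still running on bootstrap: answer with 503 instead of
    // serving pages whose urls are not known yet.
    next(new common.errors.MaintenanceError({
        message: 'The site is starting up, please try again in a moment.'
    }));
};
```

And a sketch of the url utility re-work: resource urls are no longer assembled at runtime from the settings permalink, they are pre-generated on bootstrap and looked up by resource id (argument shapes are approximate):

```js
// Before: build the url at request time from the configured permalink.
const oldUrl = urlService.utils.urlFor('post', {post: post}, true);

// After: look up the pre-generated url for this resource by its id.
const newUrl = urlService.getByResourceId(post.id);
```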
160 lines
4.7 KiB
JavaScript
// # DB API
// API for DB operations
var Promise = require('bluebird'),
    _ = require('lodash'),
    pipeline = require('../lib/promise/pipeline'),
    localUtils = require('./utils'),
    exporter = require('../data/export'),
    importer = require('../data/importer'),
    backupDatabase = require('../data/db/backup'),
    models = require('../models'),
    common = require('../lib/common'),
    docName = 'db',
    db;

/**
 * ## DB API Methods
 *
 * **See:** [API Methods](constants.js.html#api%20methods)
 */
db = {
    /**
     * ### Archive Content
     * Generate the JSON to export
     *
     * @public
     * @returns {Promise} Ghost Export JSON format
     */
    backupContent: function (options) {
        var tasks;

        options = options || {};

        function jsonResponse(filename) {
            return {db: [{filename: filename}]};
        }

        tasks = [
            backupDatabase,
            jsonResponse
        ];

        return pipeline(tasks, options);
    },
    /**
     * ### Export Content
     * Generate the JSON to export
     *
     * @public
     * @param {{context}} options
     * @returns {Promise} Ghost Export JSON format
     */
    exportContent: function exportContent(options) {
        var tasks;

        options = options || {};

        // Export data, otherwise send error 500
        function exportContent() {
            return exporter.doExport().then(function (exportedData) {
                return {db: [exportedData]};
            }).catch(function (err) {
                return Promise.reject(new common.errors.GhostError({err: err}));
            });
        }

        tasks = [
            localUtils.handlePermissions(docName, 'exportContent'),
            exportContent
        ];

        return pipeline(tasks, options);
    },
    /**
     * ### Import Content
     * Import posts, tags etc from a JSON blob
     *
     * @public
     * @param {{context}} options
     * @returns {Promise} Success
     */
    importContent: function importContent(options) {
        var tasks;
        options = options || {};

        function importContent(options) {
            return importer.importFromFile(options)
                .then(function (response) {
                    // NOTE: response can contain 2 objects if images are imported
                    return {db: [], problems: response.length === 2 ? response[1].problems : response[0].problems};
                });
        }

        tasks = [
            localUtils.handlePermissions(docName, 'importContent'),
            importContent
        ];

        return pipeline(tasks, options);
    },
    /**
     * ### Delete All Content
     * Remove all posts and tags
     *
     * @public
     * @param {{context}} options
     * @returns {Promise} Success
     */
    deleteAllContent: function deleteAllContent(options) {
        var tasks,
            queryOpts = {columns: 'id', context: {internal: true}, destroyAll: true};

        options = options || {};

        /**
         * @NOTE:
         * We fetch all posts with `columns:id` to increase the speed of this endpoint.
         * And if you trigger `post.destroy(..)`, this will trigger bookshelf and model events.
         * But we only have the `id` available in the model. This won't work, because:
         *  - the model layer can't trigger the correct event, e.g. it needs `post.page` to trigger `post|page.unpublished`.
         *  - `onDestroyed` or `onDestroying` can contain custom logic
         */
        function deleteContent() {
            return models.Base.transaction(function (transacting) {
                queryOpts.transacting = transacting;

                return models.Post.findAll(queryOpts)
                    .then((response) => {
                        return Promise.map(response.models, (post) => {
                            return models.Post.destroy(_.merge({id: post.id}, queryOpts));
                        }, {concurrency: 100});
                    })
                    .then(() => {
                        return models.Tag.findAll(queryOpts);
                    })
                    .then((response) => {
                        return Promise.map(response.models, (tag) => {
                            return models.Tag.destroy(_.merge({id: tag.id}, queryOpts));
                        }, {concurrency: 100});
                    })
                    .return({db: []})
                    .catch((err) => {
                        throw new common.errors.GhostError({
                            err: err
                        });
                    });
            });
        }

        tasks = [
            localUtils.handlePermissions(docName, 'deleteAllContent'),
            backupDatabase,
            deleteContent
        ];

        return pipeline(tasks, options);
    }
};
module.exports = db;
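// Usage (illustrative sketch, not part of this file): the HTTP API layer calls
// these methods with an options object that carries the request context, and
// `localUtils.handlePermissions` checks permissions inside the task pipeline.
// The context shape below is an assumption.
//
//   var dbAPI = require('./db');
//
//   dbAPI.exportContent({context: {user: 1}}).then(function (result) {
//       // result.db[0] contains the Ghost export JSON
//   });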