diff --git a/core/server/services/url/LocalFileCache.js b/core/server/services/url/LocalFileCache.js
new file mode 100644
index 0000000000..cbb46255e3
--- /dev/null
+++ b/core/server/services/url/LocalFileCache.js
@@ -0,0 +1,69 @@
+const fs = require('fs-extra');
+const path = require('path');
+
+class LocalFileCache {
+    /**
+     * @param {Object} options
+     * @param {String} options.storagePath - cached storage path
+     */
+    constructor({storagePath}) {
+        const urlsStoragePath = path.join(storagePath, 'urls.json');
+        const resourcesCachePath = path.join(storagePath, 'resources.json');
+
+        this.storagePaths = {
+            urls: urlsStoragePath,
+            resources: resourcesCachePath
+        };
+    }
+
+    /**
+     * Handles reading and parsing JSON from the filesystem.
+     * In case the file is corrupted or does not exist, returns null.
+     * @param {String} filePath path to read from
+     * @returns {Promise}
+     * @private
+     */
+    async readCacheFile(filePath) {
+        let cacheExists = false;
+        let cacheData = null;
+
+        try {
+            await fs.stat(filePath);
+            cacheExists = true;
+        } catch (e) {
+            cacheExists = false;
+        }
+
+        if (cacheExists) {
+            try {
+                const cacheFile = await fs.readFile(filePath, 'utf8');
+                cacheData = JSON.parse(cacheFile);
+            } catch (e) {
+                //noop as we'd start a long boot process if there are any errors in the file
+            }
+        }
+
+        return cacheData;
+    }
+
+    /**
+     *
+     * @param {'urls'|'resources'} type
+     * @returns {Promise}
+     */
+    async read(type) {
+        return await this.readCacheFile(this.storagePaths[type]);
+    }
+
+    /**
+     *
+     * @param {'urls'|'resources'} type of data to persist
+     * @param {Object} data - data to be persisted
+     * @returns {Promise}
+     */
+    async write(type, data) {
+        return fs.writeFile(this.storagePaths[type], JSON.stringify(data, null, 4));
+    }
+}
+
+module.exports = LocalFileCache;
diff --git a/core/server/services/url/UrlService.js b/core/server/services/url/UrlService.js
index fbc333612c..1e082ef157 100644
--- a/core/server/services/url/UrlService.js
+++ b/core/server/services/url/UrlService.js
@@ -1,4 +1,3 @@
-const fs = require('fs-extra');
 const _debug = require('@tryghost/debug')._base;
 const debug = _debug('ghost:services:url:service');
 const _ = require('lodash');
@@ -22,13 +21,13 @@ class UrlService {
     /**
      *
      * @param {Object} options
-     * @param {String} [options.urlsCachePath] - cached URLs storage path
-     * @param {String} [options.resourcesCachePath] - cached resources storage path
+     * @param {Object} [options.cache] - cache handler instance
+     * @param {Function} [options.cache.read] - read cache by type
+     * @param {Function} [options.cache.write] - write into cache by type
      */
-    constructor({urlsCachePath, resourcesCachePath} = {}) {
+    constructor({cache} = {}) {
         this.utils = urlUtils;
-        this.urlsCachePath = urlsCachePath;
-        this.resourcesCachePath = resourcesCachePath;
+        this.cache = cache;
         this.onFinished = null;
         this.finished = false;
         this.urlGenerators = [];
@@ -328,8 +327,8 @@ class UrlService {
         let persistedResources;
 
         if (labs.isSet('urlCache') || urlCache) {
-            persistedUrls = await this.readCacheFile(this.urlsCachePath);
-            persistedResources = await this.readCacheFile(this.resourcesCachePath);
+            persistedUrls = await this.cache.read('urls');
+            persistedResources = await this.cache.read('resources');
         }
 
         if (persistedUrls && persistedResources) {
@@ -362,35 +361,8 @@ class UrlService {
             return null;
         }
 
-        await this.persistToCacheFile(this.urlsCachePath, this.urls.urls);
-        await this.persistToCacheFile(this.resourcesCachePath, this.resources.getAll());
-    }
-
-    async persistToCacheFile(filePath, data) {
-        return fs.writeFile(filePath, JSON.stringify(data, null, 4));
-    }
-
-    async readCacheFile(filePath) {
-        let cacheExists = false;
-        let cacheData;
-
-        try {
-            await fs.stat(filePath);
-            cacheExists = true;
-        } catch (e) {
-            cacheExists = false;
-        }
-
-        if (cacheExists) {
-            try {
-                const cacheFile = await fs.readFile(filePath, 'utf8');
-                cacheData = JSON.parse(cacheFile);
-            } catch (e) {
-                //noop as we'd start a long boot process if there are any errors in the file
-            }
-        }
-
-        return cacheData;
+        await this.cache.write('urls', this.urls.urls);
+        await this.cache.write('resources', this.resources.getAll());
     }
 
     /**
diff --git a/core/server/services/url/index.js b/core/server/services/url/index.js
index 1199597ac0..a7e8805930 100644
--- a/core/server/services/url/index.js
+++ b/core/server/services/url/index.js
@@ -1,22 +1,21 @@
-const path = require('path');
 const config = require('../../../shared/config');
+const LocalFileCache = require('./LocalFileCache');
 const UrlService = require('./UrlService');
 
 // NOTE: instead of a path we could give UrlService a "data-resolver" of some sort
 // so it doesn't have to contain the logic to read data at all. This would be
 // a possible improvement in the future
-let urlsCachePath = path.join(config.getContentPath('data'), 'urls.json');
-let resourcesCachePath = path.join(config.getContentPath('data'), 'resources.json');
+let storagePath = config.getContentPath('data');
 
 // TODO: remove this hack in favor of loading from the content path when it's possible to do so
 // by mocking content folders in pre-boot phase
 if (process.env.NODE_ENV.match(/^testing/)){
-    urlsCachePath = path.join(config.get('paths').urlCache, 'urls.json');
-    resourcesCachePath = path.join(config.get('paths').urlCache, 'resources.json');
+    storagePath = config.get('paths').urlCache;
 }
 
-const urlService = new UrlService({urlsCachePath, resourcesCachePath});
+const cache = new LocalFileCache({storagePath});
+const urlService = new UrlService({cache});
 
 // Singleton
 module.exports = urlService;
diff --git a/test/unit/frontend/services/url/LocalFileCache.test.js b/test/unit/frontend/services/url/LocalFileCache.test.js
new file mode 100644
index 0000000000..96a15131a4
--- /dev/null
+++ b/test/unit/frontend/services/url/LocalFileCache.test.js
@@ -0,0 +1,55 @@
+const should = require('should');
+const sinon = require('sinon');
+const fs = require('fs-extra');
+
+const LocalFileCache = require('../../../../../core/server/services/url/LocalFileCache');
+
+describe('Unit: services/url/LocalFileCache', function () {
+    afterEach(function () {
+        sinon.restore();
+    });
+
+    describe('read', function () {
+        it('reads from file system by type', async function () {
+            const storagePath = '/tmp/url-cache/';
+            sinon.stub(fs, 'stat')
+                .withArgs(`${storagePath}urls.json`)
+                .resolves(true);
+            sinon.stub(fs, 'readFile')
+                .withArgs(`${storagePath}urls.json`)
+                .resolves(JSON.stringify({urls: 'urls!'}));
+
+            const localFileCache = new LocalFileCache({storagePath});
+
+            const cachedUrls = await localFileCache.read('urls');
+
+            cachedUrls.should.not.be.undefined();
+            cachedUrls.urls.should.equal('urls!');
+        });
+
+        it('returns null when the cache file does not exist', async function () {
+            const storagePath = '/tmp/empty-url-cache/';
+            const localFileCache = new LocalFileCache({storagePath});
+
+            const cachedUrls = await localFileCache.read('urls');
+
+            should.equal(cachedUrls, null);
+        });
+
+        it('returns null when the cache file is malformed', async function () {
+            const storagePath = '/tmp/empty-url-cache/';
+            sinon.stub(fs, 'stat')
+                .withArgs(`${storagePath}urls.json`)
+                .resolves(true);
+            sinon.stub(fs, 'readFile')
+                .withArgs(`${storagePath}urls.json`)
+                .resolves('I am not a valid JSON');
+
+            const localFileCache = new LocalFileCache({storagePath});
+
+            const cachedUrls = await localFileCache.read('urls');
+
+            should.equal(cachedUrls, null);
+        });
+    });
+});
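Reviewer note (not part of the diff): a minimal standalone sketch of how the new `LocalFileCache` read/write API behaves, assuming the script is run from the repo root; the `/tmp/url-cache` location and the sample payload are purely illustrative.

```js
const path = require('path');
const fs = require('fs-extra');
// Require path assumes the sketch lives in the repo root.
const LocalFileCache = require('./core/server/services/url/LocalFileCache');

(async () => {
    // Illustrative location; the real wiring passes config.getContentPath('data').
    const storagePath = path.join('/tmp', 'url-cache');
    await fs.ensureDir(storagePath);

    const cache = new LocalFileCache({storagePath});

    // write() persists pretty-printed JSON to <storagePath>/urls.json.
    // Arbitrary sample payload; any JSON-serialisable object works.
    await cache.write('urls', {'/sample-post/': {contentType: 'posts'}});

    // read() returns the parsed object, or null if the file is missing or corrupted.
    console.log(await cache.read('urls'));
})();
```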