// Ghost/core/server/data/schema/commands.js

const _ = require('lodash');
const Promise = require('bluebird');
const {i18n} = require('../../lib/common');
const logging = require('../../../shared/logging');
const db = require('../db');
const schema = require('./schema');
const clients = require('./clients');

function addTableColumn(tableName, table, columnName, columnSpec = schema[tableName][columnName]) {
    let column;

    // creation distinguishes between text with fieldtype, string with maxlength and all others
    if (columnSpec.type === 'text' && Object.prototype.hasOwnProperty.call(columnSpec, 'fieldtype')) {
        column = table[columnSpec.type](columnName, columnSpec.fieldtype);
    } else if (columnSpec.type === 'string') {
        if (Object.prototype.hasOwnProperty.call(columnSpec, 'maxlength')) {
            column = table[columnSpec.type](columnName, columnSpec.maxlength);
        } else {
            // fall back to a length of 191, the default used for ID strings (see #7495)
            column = table[columnSpec.type](columnName, 191);
        }
    } else {
        column = table[columnSpec.type](columnName);
    }

    if (Object.prototype.hasOwnProperty.call(columnSpec, 'nullable') && columnSpec.nullable === true) {
        column.nullable();
    } else {
        column.nullable(false);
    }
    if (Object.prototype.hasOwnProperty.call(columnSpec, 'primary') && columnSpec.primary === true) {
        column.primary();
    }
    if (Object.prototype.hasOwnProperty.call(columnSpec, 'unique') && columnSpec.unique) {
        column.unique();
    }
    if (Object.prototype.hasOwnProperty.call(columnSpec, 'unsigned') && columnSpec.unsigned) {
        column.unsigned();
    }
    if (Object.prototype.hasOwnProperty.call(columnSpec, 'references')) {
        // check if table exists?
        column.references(columnSpec.references);
    }
    if (Object.prototype.hasOwnProperty.call(columnSpec, 'cascadeDelete') && columnSpec.cascadeDelete === true) {
        column.onDelete('CASCADE');
    }
    if (Object.prototype.hasOwnProperty.call(columnSpec, 'defaultTo')) {
        column.defaultTo(columnSpec.defaultTo);
    }
    if (Object.prototype.hasOwnProperty.call(columnSpec, 'index') && columnSpec.index === true) {
        column.index();
    }
}
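
/*
 * Illustrative sketch (not part of the original file): the shape of a
 * columnSpec as addTableColumn consumes it. The table/column names here are
 * hypothetical; real specs live in ./schema.js.
 *
 *   addTableColumn('posts', table, 'subtitle', {
 *       type: 'string',     // -> table.string('subtitle', 191)
 *       maxlength: 191,
 *       nullable: true,     // -> column.nullable()
 *       index: true         // -> column.index()
 *   });
 */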

function addColumn(tableName, column, transaction, columnSpec) {
    return (transaction || db.knex).schema.table(tableName, function (table) {
        addTableColumn(tableName, table, column, columnSpec);
    });
}

function dropColumn(tableName, column, transaction) {
    return (transaction || db.knex).schema.table(tableName, function (table) {
        table.dropColumn(column);
    });
}
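
/*
 * Illustrative usage (hypothetical table/column names), e.g. from a migration:
 *
 *   await addColumn('posts', 'subtitle', transaction, {type: 'string', maxlength: 191, nullable: true});
 *   await dropColumn('posts', 'subtitle', transaction);
 */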

/**
 * Checks if a unique index exists in a table over the given columns.
 *
 * @param {string} tableName - name of the table to check for the unique constraint
 * @param {string|[string]} columns - column(s) the unique constraint is formed with
 * @param {Object} transaction - connection object containing knex reference
 * @param {Object} transaction.knex - knex instance
 */
async function hasUnique(tableName, columns, transaction) {
    const knex = (transaction || db.knex);
    const client = knex.client.config.client;
    const columnNames = _.isArray(columns) ? columns.join('_') : columns;
    const constraintName = `${tableName}_${columnNames}_unique`;

    if (client === 'mysql') {
        const dbName = knex.client.config.connection.database;
        const [rawConstraints] = await knex.raw(`
            SELECT CONSTRAINT_NAME
            FROM information_schema.TABLE_CONSTRAINTS
            WHERE 1=1
            AND CONSTRAINT_SCHEMA=:dbName
            AND TABLE_NAME=:tableName
            AND CONSTRAINT_TYPE='UNIQUE'`, {dbName, tableName});
        const dbConstraints = rawConstraints.map(c => c.CONSTRAINT_NAME);

        if (dbConstraints.includes(constraintName)) {
            return true;
        }
    } else {
        const rawConstraints = await knex.raw(`PRAGMA index_list('${tableName}');`);
        const dbConstraints = rawConstraints.map(c => c.name);

        if (dbConstraints.includes(constraintName)) {
            return true;
        }
    }

    return false;
}
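
/*
 * Note (illustrative): the constraintName built above matches knex's default
 * naming for unique indexes, e.g. for table 'posts' and columns
 * ['slug', 'status'] (hypothetical):
 *
 *   posts_slug_status_unique
 */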

/**
 * Adds a unique index to a table over the given columns.
 *
 * @param {string} tableName - name of the table to add the unique constraint to
 * @param {string|[string]} columns - column(s) to form the unique constraint with
 * @param {Object} transaction - connection object containing knex reference
 * @param {Object} transaction.knex - knex instance
 */
async function addUnique(tableName, columns, transaction) {
    const hasUniqueConstraint = await hasUnique(tableName, columns, transaction);

    if (!hasUniqueConstraint) {
        logging.info(`Adding unique constraint for: ${columns} in table ${tableName}`);

        return (transaction || db.knex).schema.table(tableName, function (table) {
            table.unique(columns);
        });
    } else {
        logging.warn(`Constraint for: ${columns} already exists for table: ${tableName}`);
    }
}

/**
 * Drops a unique key constraint from a table.
 *
 * @param {string} tableName - name of the table to drop the unique constraint from
 * @param {string|[string]} columns - column(s) the unique constraint was formed with
 * @param {Object} transaction - connection object containing knex reference
 * @param {Object} transaction.knex - knex instance
 */
async function dropUnique(tableName, columns, transaction) {
    const hasUniqueConstraint = await hasUnique(tableName, columns, transaction);

    if (hasUniqueConstraint) {
        logging.info(`Dropping unique constraint for: ${columns} in table: ${tableName}`);

        return (transaction || db.knex).schema.table(tableName, function (table) {
            table.dropUnique(columns);
        });
    } else {
        logging.warn(`Constraint for: ${columns} does not exist for table: ${tableName}`);
    }
}
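
/*
 * Illustrative usage (hypothetical table/columns) inside a migration:
 *
 *   await addUnique('posts', ['slug', 'status'], transaction);
 *   await dropUnique('posts', ['slug', 'status'], transaction); // reverse of the above
 */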

/**
 * Checks if a foreign key exists in a table over the given columns.
 *
 * @param {Object} configuration - contains all configuration for this function
 * @param {string} configuration.fromTable - name of the table the foreign key is in
 * @param {string} configuration.fromColumn - column of the table the foreign key is in
 * @param {string} configuration.toTable - name of the table the foreign key points to
 * @param {string} configuration.toColumn - column of the table the foreign key points to
 * @param {Object} configuration.transaction - connection object containing knex reference
 * @param {Object} configuration.transaction.knex - knex instance
 */
async function hasForeign({fromTable, fromColumn, toTable, toColumn, transaction}) {
    const knex = (transaction || db.knex);
    const client = knex.client.config.client;

    if (client === 'mysql') {
        const dbName = knex.client.config.connection.database;
        const [rawConstraints] = await knex.raw(`
            SELECT i.TABLE_NAME, k.COLUMN_NAME, k.REFERENCED_TABLE_NAME, k.REFERENCED_COLUMN_NAME
            FROM information_schema.TABLE_CONSTRAINTS i
            INNER JOIN information_schema.KEY_COLUMN_USAGE k ON i.CONSTRAINT_NAME = k.CONSTRAINT_NAME
            WHERE i.CONSTRAINT_TYPE = 'FOREIGN KEY'
            AND i.CONSTRAINT_SCHEMA=:dbName
            AND i.TABLE_NAME = :fromTable
            AND k.COLUMN_NAME = :fromColumn
            AND k.REFERENCED_TABLE_NAME = :toTable
            AND k.REFERENCED_COLUMN_NAME = :toColumn`, {dbName, fromTable, fromColumn, toTable, toColumn});

        return rawConstraints.length >= 1;
    } else {
        const foreignKeys = await knex.raw(`PRAGMA foreign_key_list('${fromTable}');`);
        const hasForeignKey = foreignKeys.some(foreignKey => foreignKey.table === toTable && foreignKey.from === fromColumn && foreignKey.to === toColumn);

        return hasForeignKey;
    }
}
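
/*
 * Illustrative check (hypothetical names):
 *
 *   const exists = await hasForeign({
 *       fromTable: 'posts_tags', fromColumn: 'post_id',
 *       toTable: 'posts', toColumn: 'id',
 *       transaction
 *   });
 */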

/**
 * Adds a foreign key to a table.
 *
 * @param {Object} configuration - contains all configuration for this function
 * @param {string} configuration.fromTable - name of the table to add the foreign key to
 * @param {string} configuration.fromColumn - column of the table to add the foreign key to
 * @param {string} configuration.toTable - name of the table to point the foreign key to
 * @param {string} configuration.toColumn - column of the table to point the foreign key to
 * @param {Boolean} configuration.cascadeDelete - adds the "on delete cascade" option if true
 * @param {Object} configuration.transaction - connection object containing knex reference
 * @param {Object} configuration.transaction.knex - knex instance
 */
async function addForeign({fromTable, fromColumn, toTable, toColumn, cascadeDelete = false, transaction}) {
    const hasForeignKey = await hasForeign({fromTable, fromColumn, toTable, toColumn, transaction});

    if (!hasForeignKey) {
        logging.info(`Adding foreign key for: ${fromColumn} in ${fromTable} to ${toColumn} in ${toTable}`);

        // disable and re-enable foreign key checks on sqlite because of https://github.com/knex/knex/issues/4155
        let foreignKeysEnabled;
        if (db.knex.client.config.client === 'sqlite3') {
            foreignKeysEnabled = await db.knex.raw('PRAGMA foreign_keys;');
            if (foreignKeysEnabled[0].foreign_keys) {
                await db.knex.raw('PRAGMA foreign_keys = OFF;');
            }
        }

        await (transaction || db.knex).schema.table(fromTable, function (table) {
            if (cascadeDelete) {
                table.foreign(fromColumn).references(`${toTable}.${toColumn}`).onDelete('CASCADE');
            } else {
                table.foreign(fromColumn).references(`${toTable}.${toColumn}`);
            }
        });

        if (db.knex.client.config.client === 'sqlite3') {
            if (foreignKeysEnabled[0].foreign_keys) {
                await db.knex.raw('PRAGMA foreign_keys = ON;');
            }
        }
    } else {
        logging.warn(`Skipped adding foreign key for ${fromColumn} in ${fromTable} to ${toColumn} in ${toTable} - foreign key already exists`);
    }
}
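
/*
 * Illustrative usage (hypothetical names):
 *
 *   await addForeign({
 *       fromTable: 'posts_tags',
 *       fromColumn: 'post_id',
 *       toTable: 'posts',
 *       toColumn: 'id',
 *       cascadeDelete: true,
 *       transaction
 *   });
 */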

/**
 * Drops a foreign key from a table.
 *
 * @param {Object} configuration - contains all configuration for this function
 * @param {string} configuration.fromTable - name of the table to drop the foreign key from
 * @param {string} configuration.fromColumn - column of the table to drop the foreign key from
 * @param {string} configuration.toTable - name of the table the foreign key points to
 * @param {string} configuration.toColumn - column of the table the foreign key points to
 * @param {Object} configuration.transaction - connection object containing knex reference
 * @param {Object} configuration.transaction.knex - knex instance
 */
async function dropForeign({fromTable, fromColumn, toTable, toColumn, transaction}) {
    const hasForeignKey = await hasForeign({fromTable, fromColumn, toTable, toColumn, transaction});

    if (hasForeignKey) {
        logging.info(`Dropping foreign key for: ${fromColumn} in ${fromTable} to ${toColumn} in ${toTable}`);

        // disable and re-enable foreign key checks on sqlite because of https://github.com/knex/knex/issues/4155
        let foreignKeysEnabled;
        if (db.knex.client.config.client === 'sqlite3') {
            foreignKeysEnabled = await db.knex.raw('PRAGMA foreign_keys;');
            if (foreignKeysEnabled[0].foreign_keys) {
                await db.knex.raw('PRAGMA foreign_keys = OFF;');
            }
        }

        await (transaction || db.knex).schema.table(fromTable, function (table) {
            table.dropForeign(fromColumn);
        });

        if (db.knex.client.config.client === 'sqlite3') {
            if (foreignKeysEnabled[0].foreign_keys) {
                await db.knex.raw('PRAGMA foreign_keys = ON;');
            }
        }
    } else {
        logging.warn(`Skipped dropping foreign key for ${fromColumn} in ${fromTable} to ${toColumn} in ${toTable} - foreign key does not exist`);
    }
}

/**
 * Checks if a primary key index exists in a table.
 *
 * @param {string} tableName - name of the table to check the primary key constraint in
 * @param {Object} transaction - connection object containing knex reference
 * @param {Object} transaction.knex - knex instance
 */
async function hasPrimaryKey(tableName, transaction) {
    const knex = (transaction || db.knex);
    const client = knex.client.config.client;

    if (client === 'mysql') {
        const dbName = knex.client.config.connection.database;
        const [rawConstraints] = await knex.raw(`
            SELECT CONSTRAINT_NAME
            FROM information_schema.TABLE_CONSTRAINTS
            WHERE 1=1
            AND CONSTRAINT_SCHEMA=:dbName
            AND TABLE_NAME=:tableName
            AND CONSTRAINT_TYPE='PRIMARY KEY'`, {dbName, tableName});

        return rawConstraints.length > 0;
    } else {
        const rawConstraints = await knex.raw(`PRAGMA index_list('${tableName}');`);
        const tablePrimaryKey = rawConstraints.find(c => c.origin === 'pk');

        return tablePrimaryKey;
    }
}

/**
 * Adds a primary key index to a table over the given columns.
 *
 * @param {string} tableName - name of the table to add the primary key constraint to
 * @param {string|[string]} columns - column(s) to form the primary key constraint with
 * @param {Object} transaction - connection object containing knex reference
 * @param {Object} transaction.knex - knex instance
 */
async function addPrimaryKey(tableName, columns, transaction) {
    const hasUniqueConstraint = await hasPrimaryKey(tableName, transaction);

    if (!hasUniqueConstraint) {
        logging.info(`Adding primary key constraint for: ${columns} in table ${tableName}`);

        return (transaction || db.knex).schema.table(tableName, function (table) {
            table.primary(columns);
        });
    } else {
        logging.warn(`Primary key constraint for: ${columns} already exists for table: ${tableName}`);
    }
}
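
/*
 * Illustrative usage (hypothetical table/columns):
 *
 *   await addPrimaryKey('members_products', ['member_id', 'product_id'], transaction);
 */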

/**
 * https://github.com/tgriesser/knex/issues/1303
 * createTableIfNotExists can throw error if indexes are already in place
 */
function createTable(table, transaction, tableSpec = schema[table]) {
    return (transaction || db.knex).schema.hasTable(table)
        .then(function (exists) {
            if (exists) {
                return;
            }

            return (transaction || db.knex).schema.createTable(table, function (t) {
                Object.keys(tableSpec)
                    .filter(column => !(column.startsWith('@@')))
                    .forEach(column => addTableColumn(table, t, column, tableSpec[column]));

                if (tableSpec['@@INDEXES@@']) {
                    tableSpec['@@INDEXES@@'].forEach(index => t.index(index));
                }
                if (tableSpec['@@UNIQUE_CONSTRAINTS@@']) {
                    tableSpec['@@UNIQUE_CONSTRAINTS@@'].forEach(unique => t.unique(unique));
                }
            });
        });
}
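
/*
 * Illustrative sketch of a tableSpec using the special '@@' keys consumed
 * above (the table and columns are hypothetical):
 *
 *   createTable('widgets', transaction, {
 *       id: {type: 'string', maxlength: 24, nullable: false, primary: true},
 *       name: {type: 'string', maxlength: 191, nullable: false},
 *       '@@INDEXES@@': [['name']],
 *       '@@UNIQUE_CONSTRAINTS@@': [['name']]
 *   });
 */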

function deleteTable(table, transaction) {
    return (transaction || db.knex).schema.dropTableIfExists(table);
}

function getTables(transaction) {
    const client = (transaction || db.knex).client.config.client;

    if (_.includes(_.keys(clients), client)) {
        return clients[client].getTables(transaction);
    }

    return Promise.reject(i18n.t('notices.data.utils.index.noSupportForDatabase', {client: client}));
}

function getIndexes(table, transaction) {
    const client = (transaction || db.knex).client.config.client;

    if (_.includes(_.keys(clients), client)) {
        return clients[client].getIndexes(table, transaction);
    }

    return Promise.reject(i18n.t('notices.data.utils.index.noSupportForDatabase', {client: client}));
}

function getColumns(table, transaction) {
    const client = (transaction || db.knex).client.config.client;

    if (_.includes(_.keys(clients), client)) {
        return clients[client].getColumns(table);
    }

    return Promise.reject(i18n.t('notices.data.utils.index.noSupportForDatabase', {client: client}));
}

function checkTables(transaction) {
    const client = (transaction || db.knex).client.config.client;

    if (client === 'mysql') {
        return clients[client].checkPostTable();
    }
}

function createColumnMigration(...migrations) {
    async function runColumnMigration(conn, migration) {
        const {
            table,
            column,
            dbIsInCorrectState,
            operation,
            operationVerb,
            columnDefinition
        } = migration;

        const hasColumn = await conn.schema.hasColumn(table, column);
        const isInCorrectState = dbIsInCorrectState(hasColumn);

        if (isInCorrectState) {
            logging.warn(`${operationVerb} ${table}.${column} column - skipping as table is correct`);
        } else {
            logging.info(`${operationVerb} ${table}.${column} column`);
            await operation(table, column, conn, columnDefinition);
        }
    }

    return async function columnMigration(conn) {
        for (const migration of migrations) {
            await runColumnMigration(conn, migration);
        }
    };
}
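
/*
 * Illustrative sketch of composing this helper in a migration. The table and
 * column are hypothetical; the migration descriptor fields match those
 * destructured in runColumnMigration above:
 *
 *   const up = createColumnMigration({
 *       table: 'posts',
 *       column: 'subtitle',
 *       dbIsInCorrectState: hasColumn => hasColumn === true,
 *       operation: addColumn,
 *       operationVerb: 'Adding',
 *       columnDefinition: {type: 'string', maxlength: 191, nullable: true}
 *   });
 *
 *   // later: await up(knexConnection); // knexConnection is hypothetical
 */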

module.exports = {
    checkTables: checkTables,
    createTable: createTable,
    deleteTable: deleteTable,
    getTables: getTables,
    getIndexes: getIndexes,
    addUnique: addUnique,
    dropUnique: dropUnique,
    addPrimaryKey: addPrimaryKey,
    addForeign: addForeign,
    dropForeign: dropForeign,
    addColumn: addColumn,
    dropColumn: dropColumn,
    getColumns: getColumns,
    createColumnMigration
};