#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
PGmigrate - PostgreSQL migrations made easy
"""
#
# Copyright (c) 2016-2023 Yandex LLC <https://github.com/yandex>
# Copyright (c) 2016-2023 Other contributors as noted in the AUTHORS file.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose, without fee, and without a written
# agreement is hereby granted, provided that the above copyright notice
# and this paragraph and the following two paragraphs appear in all copies.
#
# IN NO EVENT SHALL YANDEX LLC BE LIABLE TO ANY PARTY FOR DIRECT,
# INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING LOST
# PROFITS, ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION,
# EVEN IF YANDEX LLC HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# YANDEX SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS ON AN "AS IS"
# BASIS, AND YANDEX LLC HAS NO OBLIGATIONS TO PROVIDE MAINTENANCE,
# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.

from __future__ import absolute_import, print_function, unicode_literals

import argparse
import codecs
import json
import logging
import os
import re
import sys
import threading
import time
import uuid
from builtins import str as text
from collections import OrderedDict, namedtuple
from contextlib import closing

import psycopg2
import sqlparse
import yaml
from psycopg2.extensions import make_dsn, parse_dsn
from psycopg2.extras import LoggingConnection
from psycopg2.sql import SQL, Identifier

LOG = logging.getLogger(__name__)


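# Typical CLI usage (an illustrative sketch; the paths and connection
# string below are placeholders, not defaults):
#
#     pgmigrate -d /path/to/base_dir -c 'dbname=mydb' -t latest migrate
#     pgmigrate -d /path/to/base_dir -c 'dbname=mydb' info
#
# base_dir is expected to contain a 'migrations' subdirectory with
# V<version>__<description>.sql files and, optionally, a 'migrations.yml'
# config (see get_config below).

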
class MigrateError(RuntimeError):
    """
    Common migration error class
    """


class MalformedStatement(MigrateError):
    """
    Incorrect statement exception
    """


class MalformedMigration(MigrateError):
    """
    Incorrect migration exception
    """


class MalformedSchema(MigrateError):
    """
    Incorrect schema exception
    """


class ConfigurationError(MigrateError):
    """
    Incorrect config or cmd args exception
    """


class BaselineError(MigrateError):
    """
    Baseline error class
    """


def get_conn_id(conn):
    """
    Extract application_name from dsn
    """
    parsed = parse_dsn(conn.dsn)
    return parsed['application_name']


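# Illustrative example: for a connection made by _create_connection below,
# conn.dsn contains something like
# 'dbname=mydb application_name=pgmigrate-<uuid4>', so parse_dsn yields a
# dict and get_conn_id returns 'pgmigrate-<uuid4>'.

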
class ConflictTerminator(threading.Thread):
    """
    Kills conflicting pids (only on postgresql >= 9.6)
    """

    def __init__(self, conn_str, interval):
        threading.Thread.__init__(self, name='terminator')
        self.daemon = True
        self.log = logging.getLogger('terminator')
        self.conn_str = conn_str
        self.conns = set()
        self.interval = interval
        self.should_run = True
        self.conn = None

    def stop(self):
        """
        Stop iterations and close connection
        """
        self.should_run = False

    def add_conn(self, conn):
        """
        Add conn pid to pgmigrate pids list
        """
        self.conns.add(get_conn_id(conn))

    def remove_conn(self, conn):
        """
        Remove conn from pgmigrate pids list
        """
        self.conns.remove(get_conn_id(conn))

    def run(self):
        """
        Periodically terminate all backends blocking pgmigrate pids
        """
        self.conn = _create_raw_connection(self.conn_str, self.log)
        self.conn.autocommit = True
        while self.should_run:
            with self.conn.cursor() as cursor:
                for conn_id in self.conns:
                    cursor.execute(
                        """
                        SELECT b.blocking_pid,
                               pg_terminate_backend(b.blocking_pid)
                        FROM (SELECT unnest(pg_blocking_pids(pid))
                                  AS blocking_pid
                              FROM pg_stat_activity
                              WHERE application_name
                                  LIKE '%%' || %s || '%%') as b
                        """, (conn_id, ))
                    terminated = [x[0] for x in cursor.fetchall()]
                    for i in terminated:
                        self.log.info('Terminated conflicting pid: %s', i)
            time.sleep(self.interval)


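# Wiring sketch (this mirrors what get_config does when
# termination_interval is set and dryrun is off; the values are
# illustrative):
#
#     terminator = ConflictTerminator('dbname=mydb', interval=5)
#     terminator.start()
#     terminator.add_conn(conn)  # conn made by _create_connection
#     ...                        # run migrations
#     terminator.remove_conn(conn)
#     terminator.stop()

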
REF_COLUMNS = [
    'version',
    'description',
    'type',
    'installed_by',
    'installed_on',
]


def _create_raw_connection(conn_string, logger=LOG):
    conn = psycopg2.connect(conn_string, connection_factory=LoggingConnection)
    conn.initialize(logger)

    return conn


def _create_connection(config):
    conn_id = 'pgmigrate-{id}'.format(id=str(uuid.uuid4()))
    conn = _create_raw_connection(
        make_dsn(config.conn, application_name=conn_id))
    if config.terminator_instance:
        config.terminator_instance.add_conn(conn)

    return conn


def _init_cursor(conn, session):
    """
    Get cursor initialized with session commands
    """
    cursor = conn.cursor()
    for query in session:
        cursor.execute(query)
        LOG.info(cursor.statusmessage)

    return cursor


def _is_initialized(schema, cursor):
    """
    Check that database is initialized
    """
    cursor.execute(
        'SELECT EXISTS(SELECT 1 FROM '
        'information_schema.tables '
        'WHERE table_schema = %s '
        'AND table_name = %s)', (schema, 'schema_version'))
    table_exists = cursor.fetchone()[0]

    if not table_exists:
        return False

    cursor.execute(
        SQL('SELECT * from {schema}.schema_version limit 1').format(
            schema=Identifier(schema)))

    colnames = [desc[0] for desc in cursor.description]

    if colnames != REF_COLUMNS:
        raise MalformedSchema(
            ('Table {schema}.schema_version has unexpected '
             'structure: {struct}').format(schema=schema,
                                           struct='|'.join(colnames)))

    return True


MIGRATION_FILE_RE = re.compile(r'V(?P<version>\d+)__(?P<description>.+)\.sql$')


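# Example file names matched by this pattern (illustrative):
#
#     V1__Initial_schema.sql    -> version 1,  description 'Initial schema'
#     V42__Add_users_table.sql  -> version 42, description 'Add users table'
#
# Underscores in the description are turned into spaces by
# _get_migrations_info_from_dir below.

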
MigrationInfo = namedtuple('MigrationInfo', ('meta', 'file_path'))

Callbacks = namedtuple('Callbacks',
                       ('beforeAll', 'beforeEach', 'afterEach', 'afterAll'))

Config = namedtuple('Config',
                    ('target', 'baseline', 'cursor', 'dryrun', 'callbacks',
                     'user', 'base_dir', 'conn', 'session', 'conn_instance',
                     'terminator_instance', 'termination_interval', 'schema',
                     'disable_schema_check', 'check_serial_versions'))

CONFIG_IGNORE = ['cursor', 'conn_instance', 'terminator_instance']


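# A migrations.yml sketch matching the Config fields above (keys and
# values are illustrative; any field except those in CONFIG_IGNORE can be
# set this way and is overridden by the corresponding cmdline argument):
#
#     conn: dbname=mydb user=me
#     baseline: 1
#     callbacks:
#         afterAll:
#             - callbacks/grants

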
def _get_files_from_dir(path):
    """
    Get all files in all subdirs in path
    """
    for root, _, files in os.walk(path):
        for fname in files:
            yield os.path.basename(fname), os.path.join(root, fname)


def _get_migrations_info_from_dir(base_dir):
    """
    Get all migrations from base dir
    """
    path = os.path.join(base_dir, 'migrations')
    migrations = {}
    if not (os.path.exists(path) and os.path.isdir(path)):
        raise ConfigurationError(
            'Migrations dir not found (expected to be {path})'.format(
                path=path))
    for fname, file_path in _get_files_from_dir(path):
        match = MIGRATION_FILE_RE.match(fname)
        if match is None:
            LOG.warning('File %s does not match pattern %s. Skipping it.',
                        file_path, MIGRATION_FILE_RE.pattern)
            continue
        version = int(match.group('version'))
        ret = {
            'version': version,
            'type': 'auto',
            'installed_by': None,
            'installed_on': None,
            'description': match.group('description').replace('_', ' '),
        }
        ret['transactional'] = 'NONTRANSACTIONAL' not in ret['description']
        migration = MigrationInfo(
            ret,
            file_path,
        )
        if version in migrations:
            raise MalformedMigration(
                ('Found migrations with same version: {version} '
                 '\nfirst : {first_path}'
                 '\nsecond: {second_path}').format(
                     version=version,
                     first_path=migration.file_path,
                     second_path=migrations[version].file_path))
        migrations[version] = migration

    return migrations


def _get_migrations_info(base_dir, baseline_v, target_v):
    """
    Get migrations from baseline to target from base dir
    """
    migrations = {}
    target = target_v if target_v is not None else float('inf')

    for version, ret in _get_migrations_info_from_dir(base_dir).items():
        if baseline_v < version <= target:
            migrations[version] = ret.meta
        else:
            LOG.info(
                'Ignoring migration %r because of baseline: %r or target: %r',
                ret,
                baseline_v,
                target,
            )
    return migrations


def _get_info(base_dir, baseline_v, target_v, schema, cursor):
    """
    Get migrations info from database and base dir
    """
    ret = {}
    cursor.execute(
        SQL('SELECT {columns} FROM {schema}.schema_version').format(
            schema=Identifier(schema),
            columns=SQL(', ').join([Identifier(x) for x in REF_COLUMNS])))
    for i in cursor.fetchall():
        version = {}
        for j in enumerate(REF_COLUMNS):
            if j[1] == 'installed_on':
                version[j[1]] = i[j[0]].strftime('%F %H:%M:%S')
            else:
                version[j[1]] = i[j[0]]
        version['version'] = int(version['version'])
        transactional = 'NONTRANSACTIONAL' not in version['description']
        version['transactional'] = transactional
        ret[version['version']] = version

    baseline_v = max(baseline_v, sorted(ret.keys())[-1])

    migrations_info = _get_migrations_info(base_dir, baseline_v, target_v)
    for version in migrations_info:
        num = migrations_info[version]['version']
        if num not in ret:
            ret[num] = migrations_info[version]

    return ret


def _get_database_user(cursor):
    cursor.execute('SELECT CURRENT_USER')
    return cursor.fetchone()[0]


def _get_state(base_dir, baseline_v, target, schema, cursor):
    """
    Get info wrapper (able to handle an uninitialized database)
    """
    if _is_initialized(schema, cursor):
        return _get_info(base_dir, baseline_v, target, schema, cursor)
    return _get_migrations_info(base_dir, baseline_v, target)


def _set_baseline(baseline_v, user, schema, cursor):
    """
    Cleanup schema_version and set baseline
    """
    cursor.execute(
        SQL('SELECT EXISTS(SELECT 1 FROM {schema}'
            '.schema_version WHERE version >= %s::bigint)').format(
                schema=Identifier(schema)), (baseline_v, ))
    check_failed = cursor.fetchone()[0]

    if check_failed:
        raise BaselineError(
            'Unable to baseline, version '
            '{version} already applied'.format(version=text(baseline_v)))

    LOG.info('cleaning up table schema_version')
    cursor.execute(
        SQL('DELETE FROM {schema}.schema_version').format(
            schema=Identifier(schema)))
    LOG.info(cursor.statusmessage)

    LOG.info('setting baseline')
    cursor.execute(
        SQL('INSERT INTO {schema}.schema_version '
            '(version, type, description, installed_by) '
            'VALUES (%s::bigint, %s, %s, %s)').format(
                schema=Identifier(schema)),
        (text(baseline_v), 'manual', 'Forced baseline', user))
    LOG.info(cursor.statusmessage)


def _init_schema(schema, cursor):
    """
    Create schema_version table
    """
    cursor.execute(
        'SELECT EXISTS(SELECT 1 FROM '
        'information_schema.schemata '
        'WHERE schema_name = %s)', (schema, ))
    schema_exists = cursor.fetchone()[0]
    if not schema_exists:
        LOG.info('creating schema')
        cursor.execute(
            SQL('CREATE SCHEMA IF NOT EXISTS {schema}').format(
                schema=Identifier(schema)))
    LOG.info('creating type schema_version_type')
    cursor.execute(
        SQL('CREATE TYPE {schema}.schema_version_type '
            'AS ENUM (%s, %s)').format(schema=Identifier(schema)),
        ('auto', 'manual'))
    LOG.info(cursor.statusmessage)
    LOG.info('creating table schema_version')
    cursor.execute(
        SQL('CREATE TABLE {schema}.schema_version ('
            'version BIGINT NOT NULL PRIMARY KEY, '
            'description TEXT NOT NULL, '
            'type {schema}.schema_version_type NOT NULL '
            'DEFAULT %s, '
            'installed_by TEXT NOT NULL, '
            'installed_on TIMESTAMP WITHOUT time ZONE '
            'DEFAULT now() NOT NULL)').format(schema=Identifier(schema)),
        ('auto', ))
    LOG.info(cursor.statusmessage)


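# The resulting bookkeeping table looks like this (the column list must
# stay in sync with REF_COLUMNS above):
#
#     version      BIGINT PRIMARY KEY
#     description  TEXT
#     type         {schema}.schema_version_type ('auto' | 'manual')
#     installed_by TEXT
#     installed_on TIMESTAMP WITHOUT TIME ZONE, defaults to now()

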
def _get_statements(path):
    """
    Get statements from file
    """
    with codecs.open(path, encoding='utf-8') as i:
        data = i.read()
    if u'/* pgmigrate-encoding: utf-8 */' not in data:
        try:
            data.encode('ascii')
        except UnicodeError as exc:
            raise MalformedStatement(
                'Non-ascii symbols in file: {0}, {1}'.format(path, text(exc)))
    data = sqlparse.format(data, strip_comments=True)
    for statement in sqlparse.parsestream(data, encoding='utf-8'):
        st_str = text(statement).strip().encode('utf-8')
        if st_str:
            yield st_str


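# Migration files are treated as plain ascii unless they carry the exact
# marker below; a migration with non-ascii symbols should start with:
#
#     /* pgmigrate-encoding: utf-8 */
#
# Comments are stripped before statements are split, so the marker itself
# never reaches the server.

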
def _apply_statement(statement, file_path, cursor):
    """
    Execute statement using cursor
    """
    try:
        cursor.execute(statement)
    except psycopg2.Error as exc:
        LOG.error('Error executing statement from %s:', file_path)
        for line in statement.splitlines():
            LOG.error(line)
        LOG.error(exc)
        raise MigrateError('Unable to apply statement')


def _apply_file(file_path, cursor):
    """
    Execute all statements in file
    """
    try:
        for statement in _get_statements(file_path):
            _apply_statement(statement, file_path, cursor)
    except MalformedStatement as exc:
        LOG.error(exc)
        raise exc


def _apply_version(version, base_dir, user, schema, cursor):
    """
    Execute all statements in migration version
    """
    all_versions = _get_migrations_info_from_dir(base_dir)
    version_info = all_versions[version]
    LOG.info('Trying to apply version %r', version_info)

    _apply_file(version_info.file_path, cursor)
    cursor.execute(
        SQL('INSERT INTO {schema}.schema_version '
            '(version, description, installed_by) '
            'VALUES (%s::bigint, %s, %s)').format(schema=Identifier(schema)),
        (text(version), version_info.meta['description'], user))


def _parse_str_callbacks(callbacks, ret, base_dir):
    if not callbacks:
        return ret
    callbacks = callbacks.split(',')
    for callback in callbacks:
        if not callback:
            continue
        tokens = callback.split(':')
        if tokens[0] not in ret._fields:
            raise ConfigurationError(
                'Unexpected callback '
                'type: {type}'.format(type=text(tokens[0])))
        path = os.path.join(base_dir, tokens[1])
        if not os.path.exists(path):
            raise ConfigurationError(
                'Path unavailable: {path}'.format(path=text(path)))
        if os.path.isdir(path):
            for fname in sorted(os.listdir(path)):
                getattr(ret, tokens[0]).append(os.path.join(path, fname))
        else:
            getattr(ret, tokens[0]).append(path)

    return ret


def _parse_dict_callbacks(callbacks, ret, base_dir):
    for i in callbacks:
        if i in ret._fields:
            for j in callbacks[i] or []:
                path = os.path.join(base_dir, j)
                if not os.path.exists(path):
                    raise ConfigurationError(
                        'Path unavailable: {path}'.format(path=text(path)))
                if os.path.isdir(path):
                    for fname in sorted(os.listdir(path)):
                        getattr(ret, i).append(os.path.join(path, fname))
                else:
                    getattr(ret, i).append(path)
        else:
            raise ConfigurationError(
                'Unexpected callback type: {type}'.format(type=text(i)))

    return ret


def _get_callbacks(callbacks, base_dir=''):
    """
    Parse cmdline/config callbacks
    """
    ret = Callbacks(beforeAll=[], beforeEach=[], afterEach=[], afterAll=[])
    if isinstance(callbacks, dict):
        return _parse_dict_callbacks(callbacks, ret, base_dir)
    return _parse_str_callbacks(callbacks, ret, base_dir)


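# The cmdline form is a comma-separated list of type:path pairs, e.g.
# (illustrative paths):
#
#     -a 'beforeAll:callbacks/setup.sql,afterAll:callbacks/grants'
#
# A directory path expands to all files inside it, sorted by name.

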
def _migrate_step(state, callbacks, base_dir, user, schema, cursor):
    """
    Apply all pending versions in state with callbacks
    """
    before_all_executed = False
    should_migrate = False
    if not _is_initialized(schema, cursor):
        LOG.info('schema not initialized')
        _init_schema(schema, cursor)
    for version in sorted(state.keys()):
        LOG.debug('has version %r', version)
        if state[version]['installed_on'] is None:
            should_migrate = True
            if not before_all_executed and callbacks.beforeAll:
                LOG.info('Executing beforeAll callbacks:')
                for callback in callbacks.beforeAll:
                    _apply_file(callback, cursor)
                    LOG.info(callback)
                before_all_executed = True

            LOG.info('Migrating to version %d', version)
            if callbacks.beforeEach:
                LOG.info('Executing beforeEach callbacks:')
                for callback in callbacks.beforeEach:
                    LOG.info(callback)
                    _apply_file(callback, cursor)

            _apply_version(version, base_dir, user, schema, cursor)

            if callbacks.afterEach:
                LOG.info('Executing afterEach callbacks:')
                for callback in callbacks.afterEach:
                    LOG.info(callback)
                    _apply_file(callback, cursor)

    if should_migrate and callbacks.afterAll:
        LOG.info('Executing afterAll callbacks:')
        for callback in callbacks.afterAll:
            LOG.info(callback)
            _apply_file(callback, cursor)


def _finish(config):
    if config.dryrun:
        config.cursor.execute('rollback')
    else:
        config.cursor.execute('commit')
    if config.terminator_instance:
        config.terminator_instance.stop()
    config.conn_instance.close()


def info(config, stdout=True):
    """
    Info cmdline wrapper
    """
    state = _get_state(config.base_dir, config.baseline, config.target,
                       config.schema, config.cursor)
    if stdout:
        out_state = OrderedDict()
        for version in sorted(state, key=int):
            out_state[version] = state[version]
        sys.stdout.write(
            json.dumps(out_state, indent=4, separators=(',', ': ')) + '\n')

    _finish(config)

    return state


def clean(config):
    """
    Drop schema_version table
    """
    if _is_initialized(config.schema, config.cursor):
        LOG.info('dropping schema_version')
        config.cursor.execute(
            SQL('DROP TABLE {schema}.schema_version').format(
                schema=Identifier(config.schema)))
        LOG.info(config.cursor.statusmessage)
        LOG.info('dropping schema_version_type')
        config.cursor.execute(
            SQL('DROP TYPE {schema}.schema_version_type').format(
                schema=Identifier(config.schema)))
        LOG.info(config.cursor.statusmessage)
        _finish(config)


def baseline(config):
    """
    Set baseline cmdline wrapper
    """
    if not _is_initialized(config.schema, config.cursor):
        _init_schema(config.schema, config.cursor)
    _set_baseline(config.baseline, config.user, config.schema, config.cursor)

    _finish(config)


def _prepare_nontransactional_steps(state, callbacks):
    steps = []
    i = {'state': {}, 'cbs': _get_callbacks('')}
    for version in sorted(state):
        if not state[version]['transactional']:
            if i['state']:
                steps.append(i)
                i = {'state': {}, 'cbs': _get_callbacks('')}
            elif not steps:
                LOG.error('First migration MUST be transactional')
                raise MalformedMigration('First migration MUST '
                                         'be transactional')
            steps.append({
                'state': {
                    version: state[version],
                },
                'cbs': _get_callbacks(''),
            })
        else:
            i['state'][version] = state[version]
            i['cbs'] = callbacks

    if i['state']:
        steps.append(i)

    transactional = []
    for (num, step) in enumerate(steps):
        if list(step['state'].values())[0]['transactional']:
            transactional.append(num)

    if len(transactional) > 1:
        for num in transactional[1:]:
            steps[num]['cbs'] = steps[num]['cbs']._replace(beforeAll=[])
        for num in transactional[:-1]:
            steps[num]['cbs'] = steps[num]['cbs']._replace(afterAll=[])

    LOG.info('Initialization plan result:\n %s',
             json.dumps(steps, indent=4, separators=(',', ': ')))

    return steps


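# Grouping sketch: for pending versions 1..4 where only 3 is marked
# NONTRANSACTIONAL, the plan becomes three steps: {1, 2} (transactional),
# {3} (nontransactional, no callbacks), {4} (transactional). beforeAll
# then runs only in the first transactional step and afterAll only in the
# last one.

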
def _execute_mixed_steps(config, steps, nt_conn):
    commit_req = False
    for step in steps:
        if commit_req:
            config.cursor.execute('commit')
            commit_req = False
        if not list(step['state'].values())[0]['transactional']:
            cur = _init_cursor(nt_conn, config.session)
        else:
            cur = config.cursor
            commit_req = True
        _migrate_step(step['state'], step['cbs'], config.base_dir,
                      config.user, config.schema, cur)


def _schema_check(schema, cursor):
    """
    Check that only one schema is used in migrations
    """
    cursor.execute(
        'SELECT n.nspname, c.relname FROM pg_locks l JOIN pg_class c ON '
        '(l.relation=c.oid) JOIN pg_namespace n ON '
        '(c.relnamespace=n.oid) WHERE l.pid = pg_backend_pid() '
        "AND n.nspname !~ '^pg_' AND n.nspname <> 'information_schema'")

    unexpected = set()
    for namespace, relation in cursor.fetchall():
        if namespace != schema:
            unexpected.add('.'.join((namespace, relation)))

    if unexpected:
        raise MigrateError(
            'Unexpected relations used in migrations: {used}'.format(
                used=(', '.join(sorted(unexpected)))))


def _check_serial_versions(state, not_applied):
    """
    Check that there are no gaps in migration versions
    """
    applied = [x for x in state if state[x]['installed_on'] is not None]
    sorted_versions = sorted(not_applied)
    if applied:
        sorted_versions.insert(0, max(applied))
    first = sorted_versions[0]
    last = sorted_versions[-1]
    if last - first + 1 != len(sorted_versions):
        versions = set(sorted_versions)
        missing = [str(x) for x in range(first, last) if x not in versions]
        raise MigrateError(
            'Migration versions have gaps: missing versions {versions}'.format(
                versions=', '.join(missing)))


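# Worked example: with applied = {1, 2} and not_applied = {4}, the checked
# sequence is [2, 4]; it spans 3 versions but holds only 2, so version 3
# is reported as missing and the migration aborts.

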
def migrate(config):
    """
    Migrate cmdline wrapper
    """
    if config.target is None:
        LOG.error('Unknown target (you could use "latest" to '
                  'use latest available version)')
        raise MigrateError('Unknown target')

    state = _get_state(config.base_dir, config.baseline, config.target,
                       config.schema, config.cursor)
    not_applied = [x for x in state if state[x]['installed_on'] is None]
    non_trans = [x for x in not_applied if not state[x]['transactional']]

    if not_applied and config.check_serial_versions:
        _check_serial_versions(state, not_applied)

    if non_trans:
        if not config.disable_schema_check:
            raise MigrateError(
                'Schema check is not available for nontransactional '
                'migrations')
        if config.dryrun:
            LOG.error('Dry run for nontransactional migrations is nonsense')
            raise MigrateError('Dry run for nontransactional migrations '
                               'is nonsense')
        if len(state) != len(not_applied):
            if len(not_applied) != len(non_trans):
                LOG.error('Unable to mix transactional and '
                          'nontransactional migrations')
                raise MigrateError('Unable to mix transactional and '
                                   'nontransactional migrations')
            config.cursor.execute('rollback')
            with closing(_create_connection(config)) as nt_conn:
                nt_conn.autocommit = True
                cursor = _init_cursor(nt_conn, config.session)
                _migrate_step(state, _get_callbacks(''), config.base_dir,
                              config.user, config.schema, cursor)
                if config.terminator_instance:
                    config.terminator_instance.remove_conn(nt_conn)
        else:
            steps = _prepare_nontransactional_steps(state, config.callbacks)

            with closing(_create_connection(config)) as nt_conn:
                nt_conn.autocommit = True

                _execute_mixed_steps(config, steps, nt_conn)

                if config.terminator_instance:
                    config.terminator_instance.remove_conn(nt_conn)
    else:
        _migrate_step(state, config.callbacks, config.base_dir, config.user,
                      config.schema, config.cursor)

        if not config.disable_schema_check:
            _schema_check(config.schema, config.cursor)

    _finish(config)


COMMANDS = {
    'info': info,
    'clean': clean,
    'baseline': baseline,
    'migrate': migrate,
}


CONFIG_DEFAULTS = Config(target=None,
                         baseline=0,
                         cursor=None,
                         dryrun=False,
                         callbacks='',
                         base_dir='',
                         user=None,
                         session=['SET lock_timeout = 0'],
                         conn='dbname=postgres user=postgres '
                         'connect_timeout=1',
                         conn_instance=None,
                         terminator_instance=None,
                         termination_interval=None,
                         schema=None,
                         disable_schema_check=False,
                         check_serial_versions=False)


def get_config(base_dir, args=None):
    """
    Load configuration from yml in base dir with respect to args
    """
    path = os.path.join(base_dir, 'migrations.yml')
    try:
        with codecs.open(path, encoding='utf-8') as i:
            base = yaml.safe_load(i) or {}
    except IOError:
        LOG.info('Unable to load %s. Using defaults', path)
        base = {}

    conf = CONFIG_DEFAULTS
    for i in [j for j in CONFIG_DEFAULTS._fields if j not in CONFIG_IGNORE]:
        if i in base:
            conf = conf._replace(**{i: base[i]})
        if args is not None:
            if i in args.__dict__ and args.__dict__[i] is not None:
                conf = conf._replace(**{i: args.__dict__[i]})

    if conf.target is not None:
        if conf.target == 'latest':
            conf = conf._replace(target=float('inf'))
        else:
            conf = conf._replace(target=int(conf.target))

    if conf.termination_interval and not conf.dryrun:
        conf = conf._replace(terminator_instance=ConflictTerminator(
            conf.conn, conf.termination_interval))
        conf.terminator_instance.start()

    conf = conf._replace(conn_instance=_create_connection(conf))
    conf = conf._replace(cursor=_init_cursor(conf.conn_instance, conf.session))
    conf = conf._replace(
        callbacks=_get_callbacks(conf.callbacks, conf.base_dir))

    if conf.user is None:
        conf = conf._replace(user=_get_database_user(conf.cursor))
    elif not conf.user:
        raise ConfigurationError('Empty user name')
    if conf.schema is None:
        conf = conf._replace(schema='public')
        conf = conf._replace(disable_schema_check=True)

    return conf


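# Programmatic usage sketch (the path is a placeholder): build a Config
# from a base dir and run a command directly:
#
#     config = get_config('/path/to/base_dir')
#     state = info(config, stdout=False)
#
# Note that get_config opens a connection, so each call produces one
# Config/connection pair, and the command closes it via _finish.

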
def _main():
    """
    Main function
    """
    parser = argparse.ArgumentParser()

    parser.add_argument('cmd',
                        choices=COMMANDS.keys(),
                        type=str,
                        help='Operation')
    parser.add_argument('-t', '--target', type=str, help='Target version')
    parser.add_argument('-c',
                        '--conn',
                        type=str,
                        help='PostgreSQL connection string')
    parser.add_argument('-d',
                        '--base_dir',
                        type=str,
                        default='',
                        help='Migrations base dir')
    parser.add_argument('-u',
                        '--user',
                        type=str,
                        help='Override database user in migration info')
    parser.add_argument('-b', '--baseline', type=int, help='Baseline version')
    parser.add_argument('-a',
                        '--callbacks',
                        type=str,
                        help='Comma-separated list of callbacks '
                        '(type:dir/file)')
    parser.add_argument('-s',
                        '--session',
                        action='append',
                        help='Session setup (e.g. isolation level)')
    parser.add_argument('-n',
                        '--dryrun',
                        action='store_true',
                        help='Say "rollback" at the end instead of "commit"')
    parser.add_argument('-l',
                        '--termination_interval',
                        type=float,
                        help='Interval for terminating blocking pids')
    parser.add_argument('-m', '--schema', type=str, help='Operate on schema')
    parser.add_argument('--disable_schema_check',
                        action='store_true',
                        help='Do not check that all changes '
                        'are in selected schema')
    parser.add_argument('--check_serial_versions',
                        action='store_true',
                        help='Check that there are no gaps '
                        'in migration versions')
    parser.add_argument('-v',
                        '--verbose',
                        default=0,
                        action='count',
                        help='Be verbose')

    args = parser.parse_args()
    logging.basicConfig(level=(logging.ERROR - 10 * (min(3, args.verbose))),
                        format='%(asctime)s %(levelname)-8s: %(message)s')

    config = get_config(args.base_dir, args)

    COMMANDS[args.cmd](config)


if __name__ == '__main__':
    _main()