1
1
mirror of https://github.com/yandex/pgmigrate.git synced 2024-10-05 16:17:14 +03:00

First opensource version - PGmigrate 1.0.0

This commit is contained in:
secwall 2016-10-03 17:21:57 +03:00
commit d29ccdd3ef
49 changed files with 2443 additions and 0 deletions

54
.gitignore vendored Normal file
View File

@ -0,0 +1,54 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
# C extensions
*.so
# Distribution / packaging
.Python
env/
bin/
build/
develop-eggs/
dist/
eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.cache
nosetests.xml
coverage.xml
# Translations
*.mo
# Mr Developer
.mr.developer.cfg
.project
.pydevproject
# Rope
.ropeproject
# Django stuff:
*.log
*.pot
# Sphinx documentation
docs/_build/

2
.isort.cfg Normal file
View File

@ -0,0 +1,2 @@
[settings]
known_third_party=psycopg2,sqlparse,yaml

407
.pylintrc Normal file
View File

@ -0,0 +1,407 @@
[MASTER]
# Specify a configuration file.
#rcfile=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=
# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=
# Pickle collected data for later comparisons.
persistent=yes
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=
# Use multiple processes to speed up Pylint.
jobs=4
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code
extension-pkg-whitelist=
# Allow optimization of some AST trees. This will activate a peephole AST
# optimizer, which will apply various small optimizations. For instance, it can
# be used to obtain the result of joining multiple strings with the addition
# operator. Joining a lot of strings can lead to a maximum recursion error in
# Pylint and this flag can prevent that. It has one side effect, the resulting
# AST will be different than the one from reality. This option is deprecated
# and it will be removed in Pylint 2.0.
optimize-ast=no
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
confidence=
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
#enable=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
disable=ungrouped-imports
[REPORTS]
# Set the output format. Available formats are text, parseable, colorized, msvs
# (visual studio) and html. You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
output-format=text
# Put messages in a separate file for each module / package specified on the
# command line instead of printing them on stdout. Reports (if any) will be
# written in a file name "pylint_global.[txt|html]". This option is deprecated
# and it will be removed in Pylint 2.0.
files-output=no
# Tells whether to display a full report or only the messages
reports=yes
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details
#msg-template=
[BASIC]
# Good variable names which should always be accepted, separated by a comma
good-names=db,zk,i,j,k,ex,Run,_
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Include a hint for the correct naming format with invalid-name
include-naming-hint=no
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
property-classes=abc.abstractproperty
# Regular expression matching correct variable names
variable-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for variable names
variable-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct function names
function-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for function names
function-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct argument names
argument-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for argument names
argument-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for attribute names
attr-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct method names
method-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for method names
method-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Naming hint for module names
module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression matching correct class attribute names
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Naming hint for class attribute names
class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Regular expression matching correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Naming hint for class names
class-name-hint=[A-Z_][a-zA-Z0-9]+$
# Regular expression matching correct constant names
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Naming hint for constant names
const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Regular expression matching correct inline iteration names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Naming hint for inline iteration names
inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
[ELIF]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
[FORMAT]
# Maximum number of characters on a single line.
max-line-length=100
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
# List of optional constructs for which whitespace checking is disabled. `dict-
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
# `empty-line` allows space-only lines.
no-space-check=trailing-comma,dict-separator
# Maximum number of lines in a module
max-module-lines=1000
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
[LOGGING]
# Logging modules to check that the string format arguments are in logging
# function parameter format
logging-modules=logging
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX,TODO
[SIMILARITIES]
# Minimum lines number of a similarity.
min-similarity-lines=4
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
[SPELLING]
# Spelling dictionary name. Available dictionaries: none. To make it working
# install python-enchant package.
spelling-dict=
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to indicated private dictionary in
# --spelling-private-dict-file option instead of raising a message.
spelling-store-unknown-words=no
[TYPECHECK]
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis. It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
[VARIABLES]
# Tells whether we should check for unused import in __init__ files.
init-import=no
# A regular expression matching the name of dummy variables (i.e. expectedly
# not used).
dummy-variables-rgx=(_+[a-zA-Z0-9]*?$)|dummy
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,_cb
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,future.builtins
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,_fields,_replace,_source,_make
[DESIGN]
# Maximum number of arguments for function / method
max-args=5
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*
# Maximum number of locals for function / method body
max-locals=15
# Maximum number of return / yield for function / method body
max-returns=6
# Maximum number of branch for function / method body
max-branches=12
# Maximum number of statements in function / method body
max-statements=50
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of boolean expressions in a if statement
max-bool-expr=5
[IMPORTS]
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=optparse
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception

7
.travis.yml Normal file
View File

@ -0,0 +1,7 @@
sudo: required
services:
- docker
script:
- make test

7
AUTHORS Normal file
View File

@ -0,0 +1,7 @@
(C) YANDEX LLC, 2016
People that contributed to it:
Alexander Artemenko <art@yandex-team.ru>
Alexander Klyuev <wizard@yandex-team.ru>
Evgeny Dyukov <secwall@yandex-team.ru>

31
Dockerfile Normal file
View File

@ -0,0 +1,31 @@
# vim:set ft=dockerfile:
FROM ubuntu:xenial
# explicitly set user/group IDs
RUN groupadd -r postgres --gid=999 && useradd -r -d /var/lib/postgresql -g postgres --uid=999 postgres
# make the "en_US.UTF-8" locale so postgres will be utf-8 enabled by default
RUN apt-get update && apt-get install -y locales && rm -rf /var/lib/apt/lists/* \
&& localedef -i en_US -c -f UTF-8 -A /usr/share/locale/locale.alias en_US.UTF-8
ENV LANG en_US.utf8
RUN apt-key adv --keyserver ha.pool.sks-keyservers.net --recv-keys B97B0AFCAA1A47F044F244A07FCC7D46ACCC4CF8
ENV PG_MAJOR 9.6
RUN echo 'deb http://apt.postgresql.org/pub/repos/apt/ xenial-pgdg main' $PG_MAJOR > /etc/apt/sources.list.d/pgdg.list
RUN apt-get update \
&& apt-get install -y postgresql-common \
sudo \
libpq-dev \
python-pip \
python3.5-dev \
python2.7-dev \
postgresql-$PG_MAJOR \
postgresql-contrib-$PG_MAJOR \
&& pip install tox
COPY ./ /dist
CMD ["/dist/run_test.sh"]

17
LICENSE Normal file
View File

@ -0,0 +1,17 @@
Copyright (c) 2016, YANDEX LLC
Permission to use, copy, modify, and distribute this software and its
documentation for any purpose, without fee, and without a written
agreement is hereby granted, provided that the above copyright notice
and this paragraph and the following two paragraphs appear in all copies.
IN NO EVENT SHALL YANDEX LLC BE LIABLE TO ANY PARTY FOR DIRECT,
INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING LOST
PROFITS, ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION,
EVEN IF YANDEX LLC HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
YANDEX LLC SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS ON AN "AS IS"
BASIS, AND YANDEX LLC HAS NO OBLIGATIONS TO PROVIDE MAINTENANCE,
SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.

5
Makefile Normal file
View File

@ -0,0 +1,5 @@
.PHONY: test
test:
docker build -t pgmigrate .
docker run -t pgmigrate

36
README.md Normal file
View File

@ -0,0 +1,36 @@
# PGmigrate
PostgreSQL migrations made easy
## Install
```
pip install yandex-pgmigrate
```
## Running tests
Tests require a running PostgreSQL instance with a superuser (to create/drop dbs).
You could set this up yourself and use [tox](https://pypi.python.org/pypi/tox)
to start tests:
```
tox
```
Second option is to use [docker](https://www.docker.com) and make:
```
make test
```
## How to use
Complete manual is [here](doc/tutorial.md).
## Release history
* 1.0.0 (2016-10-03)
* First opensource version
## License
Distributed under the PostgreSQL license. See [LICENSE](LICENSE) for more
information.

View File

@ -0,0 +1 @@
INSERT INTO ops (op) VALUES ('afterAll 00_dummy_after_all.sql');

View File

@ -0,0 +1 @@
INSERT INTO ops (op) VALUES ('afterEach 00_dummy_after_each.sql');

View File

@ -0,0 +1,6 @@
CREATE TABLE IF NOT EXISTS ops (
seq SERIAL PRIMARY KEY,
op TEXT NOT NULL
);
INSERT INTO ops (op) VALUES ('beforeAll 00_create_database_ops.sql');

View File

@ -0,0 +1 @@
INSERT INTO ops (op) VALUES ('beforeEach 00_dummy_before_each.sql');

4
doc/foodb/grants/foo.sql Normal file
View File

@ -0,0 +1,4 @@
GRANT USAGE ON SCHEMA foo TO foo;
GRANT ALL ON ALL TABLES IN SCHEMA foo TO foo;
INSERT INTO ops (op) VALUES ('grants foo');

11
doc/foodb/migrations.yml Normal file
View File

@ -0,0 +1,11 @@
callbacks:
beforeAll:
- callbacks/beforeAll
beforeEach:
- callbacks/beforeEach
afterEach:
- callbacks/afterEach
afterAll:
- callbacks/afterAll
- grants
conn: dbname=foodb

View File

@ -0,0 +1,8 @@
CREATE SCHEMA foo;
CREATE TABLE foo.foo (
id BIGINT PRIMARY KEY,
bar TEXT NOT NULL
);
INSERT INTO ops (op) VALUES ('migration V0001__Initial_schema_foo.sql');

View File

@ -0,0 +1,3 @@
ALTER TABLE foo.foo ADD COLUMN baz BIGINT NOT NULL DEFAULT 0;
INSERT INTO ops (op) VALUES ('migration V0002__Add_baz_column_to_foo.sql');

View File

@ -0,0 +1,3 @@
CREATE INDEX CONCURRENTLY i_foo_baz ON foo.foo (baz);
INSERT INTO ops (op) VALUES ('migration V0003__NONTRANSACTIONAL_Add_index_on_baz_column.sql');

336
doc/tutorial.md Normal file
View File

@ -0,0 +1,336 @@
# PGmigrate tutorial
We'll play around with example database `foodb`.
## Base directory structure of our example
Our [Example db](foodb) migrations dir structure looks like this:
```
foodb
├── callbacks # directory with sql callbacks
│ ├── afterAll # will be executed before commit and after last migration
│ ├── afterEach # will be executed after each migration
│ ├── beforeAll # will be executed after begin and before first migration
│ └── beforeEach # will be executed before each migration
├── grants # use this dir to set special callbacks for grants
├── migrations # migrations dir
├── migrations.yml # pgmigrate configuration
```
Every sql file has special operation on table `ops`.
This will help in understanding what is going on in each pgmigrate run.
## Creating `foo` user and `foodb`
We'll need dummy user and database for our experiments.
```
postgres=# CREATE ROLE foo WITH LOGIN PASSWORD 'foo';
CREATE ROLE
postgres=# CREATE DATABASE foodb;
CREATE DATABASE
```
## Getting migrations info before first migration
```
admin@localhost foodb $ pgmigrate -t 1 info
{
"1": {
"description": "Initial schema foo",
"transactional": true,
"version": 1,
"installed_by": null,
"type": "auto",
"installed_on": null
}
}
```
Here we see json description of migrations that will be applied if
we want to get to version 1.
Let's try to check steps to apply up to version 3 but ignoring version 1:
```
admin@localhost foodb $ pgmigrate -b 1 -t 3 info
{
"2": {
"description": "Add baz column to foo",
"transactional": true,
"version": 2,
"installed_by": null,
"type": "auto",
"installed_on": null
},
"3": {
"description": "NONTRANSACTIONAL Add index on baz column",
"transactional": false,
"version": 3,
"installed_by": null,
"type": "auto",
"installed_on": null
}
}
```
## Migrating to first version
```
admin@localhost foodb $ pgmigrate -t 1 migrate
admin@localhost foodb $ echo $?
0
```
Ok. Migration applied. Let's see what is in our db now.
```
admin@localhost foodb $ psql foodb
psql (9.5.4)
Type "help" for help.
foodb=# SELECT * FROM ops;
seq | op
-----+-----------------------------------------
1 | beforeAll 00_create_database_ops.sql
2 | beforeEach 00_dummy_before_each.sql
3 | migration V0001__Initial_schema_foo.sql
4 | afterEach 00_dummy_after_each.sql
5 | afterAll 00_dummy_after_all.sql
6 | grants foo
(6 rows)
foodb=# \dt foo.foo
List of relations
Schema | Name | Type | Owner
--------+------+-------+-------
foo | foo | table | admin
(1 row)
foodb=# \dS+ foo.foo
Table "foo.foo"
Column | Type | Modifiers | Storage | Stats target | Description
--------+--------+-----------+----------+--------------+-------------
id | bigint | not null | plain | |
bar | text | not null | extended | |
Indexes:
"foo_pkey" PRIMARY KEY, btree (id)
```
Let's check if `foo` user can really do something with our new table.
```
psql "dbname=foodb user=foo password=foo host=localhost"
psql (9.5.4)
Type "help" for help.
foodb=> SELECT * FROM foo.foo;
id | bar
----+-----
(0 rows)
```
## Mixing transactional and nontransactional migrations
Let's try to go to version 3.
```
admin@localhost foodb $ pgmigrate -t 3 migrate
2016-09-29 00:14:35,402 ERROR : Unable to mix transactional and nontransactional migrations
Traceback (most recent call last):
File "/usr/local/bin/pgmigrate", line 9, in <module>
load_entry_point('yandex-pgmigrate==1.0.0', 'console_scripts', 'pgmigrate')()
File "/usr/local/lib/python2.7/dist-packages/pgmigrate.py", line 663, in _main
COMMANDS[args.cmd](config)
File "/usr/local/lib/python2.7/dist-packages/pgmigrate.py", line 549, in migrate
raise MigrateError('Unable to mix transactional and '
pgmigrate.MigrateError: Unable to mix transactional and nontransactional migrations
```
Oops! It complained. But why? The main reason for this is quite simple:
Your production databases are likely larger than test ones.
And migration to version 3 could take a lot of time.
You definitely should stop on version 2, check that everything is working fine,
and then move to version 3.
## Migrating to second version
Ok. Now let's try version 2.
```
admin@localhost foodb $ pgmigrate -t 2 migrate
admin@localhost foodb $ echo $?
0
```
Looks good. But what is in db?
```
admin@localhost foodb $ psql foodb
psql (9.5.4)
Type "help" for help.
foodb=# SELECT * FROM ops;
seq | op
-----+--------------------------------------------
1 | beforeAll 00_create_database_ops.sql
2 | beforeEach 00_dummy_before_each.sql
3 | migration V0001__Initial_schema_foo.sql
4 | afterEach 00_dummy_after_each.sql
5 | afterAll 00_dummy_after_all.sql
6 | grants foo
7 | beforeAll 00_create_database_ops.sql
8 | beforeEach 00_dummy_before_each.sql
9 | migration V0002__Add_baz_column_to_foo.sql
10 | afterEach 00_dummy_after_each.sql
11 | afterAll 00_dummy_after_all.sql
12 | grants foo
(12 rows)
foodb=# \dS+ foo.foo
Table "foo.foo"
Column | Type | Modifiers | Storage | Stats target | Description
--------+--------+--------------------+----------+--------------+-------------
id | bigint | not null | plain | |
bar | text | not null | extended | |
baz | bigint | not null default 0 | plain | |
Indexes:
"foo_pkey" PRIMARY KEY, btree (id)
```
As we can see migration steps are almost the same as in version 1.
## Migrating to version 3 with nontransactional migration
```
admin@localhost foodb $ pgmigrate -t 3 migrate
admin@localhost foodb $ echo $?
0
```
In database:
```
admin@localhost foodb $ psql foodb
psql (9.5.4)
Type "help" for help.
foodb=# SELECT * FROM ops;
seq | op
-----+---------------------------------------------------------------
1 | beforeAll 00_create_database_ops.sql
2 | beforeEach 00_dummy_before_each.sql
3 | migration V0001__Initial_schema_foo.sql
4 | afterEach 00_dummy_after_each.sql
5 | afterAll 00_dummy_after_all.sql
6 | grants foo
7 | beforeAll 00_create_database_ops.sql
8 | beforeEach 00_dummy_before_each.sql
9 | migration V0002__Add_baz_column_to_foo.sql
10 | afterEach 00_dummy_after_each.sql
11 | afterAll 00_dummy_after_all.sql
12 | grants foo
13 | migration V0003__NONTRANSACTIONAL_Add_index_on_baz_column.sql
(13 rows)
foodb=# \dS+ foo.foo
Table "foo.foo"
Column | Type | Modifiers | Storage | Stats target | Description
--------+--------+--------------------+----------+--------------+-------------
id | bigint | not null | plain | |
bar | text | not null | extended | |
baz | bigint | not null default 0 | plain | |
Indexes:
"foo_pkey" PRIMARY KEY, btree (id)
"i_foo_baz" btree (baz)
```
No callbacks were applied this time (we are trying to run the absolute
minimum of operations outside of transactions).
## Baseline
Let's suppose that you already have a database with schema on version 3.
But you have already reached this state without using pgmigrate.
How should you migrate to version 4 and so on with it?
Let's remove schema_version info from our database
```
admin@localhost foodb $ pgmigrate clean
```
Now let's check how pgmigrate will bring us to version 3:
```
admin@localhost foodb $ pgmigrate -t 3 info
{
"1": {
"description": "Initial schema foo",
"transactional": true,
"version": 1,
"installed_by": null,
"type": "auto",
"installed_on": null
},
"2": {
"description": "Add baz column to foo",
"transactional": true,
"version": 2,
"installed_by": null,
"type": "auto",
"installed_on": null
},
"3": {
"description": "NONTRANSACTIONAL Add index on baz column",
"transactional": false,
"version": 3,
"installed_by": null,
"type": "auto",
"installed_on": null
}
}
```
This looks really bad. Our migration v1 will definitely fail
(because schema `foo` already exists).
Let's tell pgmigrate that our database is already on version 3.
```
admin@localhost foodb $ pgmigrate -b 3 baseline
admin@localhost foodb $ pgmigrate -t 3 info
{
"3": {
"description": "Forced baseline",
"transactional": true,
"version": 3,
"installed_on": "2016-09-29 00:37:27",
"type": "manual",
"installed_by": "admin"
}
}
```
## Migrations on empty database
When you have hundreds of migrations with some nontransactional ones
you really don't want to stop on each of them to get your empty database
to specific version (consider creating new database for some experiments).
PGmigrate is able to run this kind of migration in a single command run
(but you should definitely know what you are doing).
Let's try it.
Drop and create empty `foodb`
```
postgres=# DROP DATABASE foodb;
DROP DATABASE
postgres=# CREATE DATABASE foodb;
CREATE DATABASE
```
Now migrate straight to version 3
```
admin@localhost foodb $ pgmigrate -t 3 migrate
```
Operations log will look like this:
```
admin@localhost foodb $ psql foodb
psql (9.5.4)
Type "help" for help.
foodb=# SELECT * FROM ops;
seq | op
-----+---------------------------------------------------------------
1 | beforeAll 00_create_database_ops.sql
2 | beforeEach 00_dummy_before_each.sql
3 | migration V0001__Initial_schema_foo.sql
4 | afterEach 00_dummy_after_each.sql
5 | beforeEach 00_dummy_before_each.sql
6 | migration V0002__Add_baz_column_to_foo.sql
7 | afterEach 00_dummy_after_each.sql
8 | afterAll 00_dummy_after_all.sql
9 | grants foo
10 | migration V0003__NONTRANSACTIONAL_Add_index_on_baz_column.sql
(10 rows)
```

22
features/baseline.feature Normal file
View File

@ -0,0 +1,22 @@
Feature: Baseline
Scenario: Setting baseline leaves only one migration
Given migration dir
And migrations
| file | code |
| V1__Single_migration.sql | SELECT 1; |
| V2__Another_migration.sql | SELECT 1; |
And database and connection
And successful pgmigrate run with "-t 2 migrate"
When we run pgmigrate with "-b 3 baseline"
Then pgmigrate command "succeeded"
And database contains schema_version
And migration info contains forced baseline=3
Scenario: Setting baseline on noninitialized database
Given migration dir
And database and connection
When we run pgmigrate with "-b 1 baseline"
Then pgmigrate command "succeeded"
And database contains schema_version
And migration info contains forced baseline=1

12
features/clean.feature Normal file
View File

@ -0,0 +1,12 @@
Feature: Clean
Scenario: Cleaning database makes it uninitialized
Given migration dir
And migrations
| file | code |
| V1__Single_migration.sql | SELECT 1; |
And database and connection
And successful pgmigrate run with "-t 1 migrate"
When we run pgmigrate with "clean"
Then pgmigrate command "succeeded"
And database has no schema_version table

62
features/config.feature Normal file
View File

@ -0,0 +1,62 @@
Feature: Getting info from config
Scenario: Callbacks from config are executed in correct order
Given migration dir
And migrations
| file | code |
| V1__Single_migration.sql | INSERT INTO mycooltable (op) values ('Migration 1'); |
| V2__Another_migration.sql | INSERT INTO mycooltable (op) values ('Migration 2'); |
And config callbacks
| type | file | code |
| beforeAll | before_all.sql | CREATE TABLE mycooltable (seq SERIAL PRIMARY KEY, op TEXT); |
| beforeEach | before_each.sql | INSERT INTO mycooltable (op) values ('Before each'); |
| afterEach | after_each.sql | INSERT INTO mycooltable (op) values ('After each'); |
| afterAll | after_all.sql | INSERT INTO mycooltable (op) values ('After all'); |
And database and connection
When we run pgmigrate with "-t 2 migrate"
Then pgmigrate command "succeeded"
And database contains schema_version
And query "SELECT * from mycooltable order by seq;" equals
| seq | op |
| 1 | Before each |
| 2 | Migration 1 |
| 3 | After each |
| 4 | Before each |
| 5 | Migration 2 |
| 6 | After each |
| 7 | After all |
Scenario: Callbacks from config are executed from dir
Given migration dir
And migrations
| file | code |
| V1__Single_migration.sql | CREATE TABLE mycooltable (seq SERIAL PRIMARY KEY, op TEXT); |
And config callbacks
| type | dir | file | code |
| afterAll | after_all | callback.sql | INSERT INTO mycooltable (op) values ('After all'); |
And database and connection
When we run pgmigrate with "-t 2 migrate"
Then pgmigrate command "succeeded"
And database contains schema_version
And query "SELECT * from mycooltable order by seq;" equals
| seq | op |
| 1 | After all |
Scenario: Callbacks from config are overridden by args
Given migration dir
And migrations
| file | code |
| V1__Single_migration.sql | CREATE TABLE mycooltable (seq SERIAL PRIMARY KEY, op TEXT); |
And config callbacks
| type | file | code |
| INVALID | callback.sql | SELECT 1; |
And callbacks
| type | file | code |
| afterAll | after_all.sql | INSERT INTO mycooltable (op) values ('After all'); |
And database and connection
When we run pgmigrate with our callbacks and "-t 2 migrate"
Then pgmigrate command "succeeded"
And database contains schema_version
And query "SELECT * from mycooltable order by seq;" equals
| seq | op |
| 1 | After all |

11
features/dryrun.feature Normal file
View File

@ -0,0 +1,11 @@
Feature: Dryrun
Scenario: One migration in dir applies after migrate command
Given migration dir
And migrations
| file | code |
| V1__Single_migration.sql | SELECT 1; |
And database and connection
When we run pgmigrate with "-n -t 1 migrate"
Then pgmigrate command "succeeded"
And database has no schema_version table

View File

@ -0,0 +1,5 @@
Feature: Empty database has no schema_version table
Scenario: Check uninitialized
Given database and connection
Then database has no schema_version table

21
features/environment.py Normal file
View File

@ -0,0 +1,21 @@
import shutil
def before_scenario(context, scenario):
try:
context.last_migrate_res = {}
context.callbacks = []
context.migrate_config = {}
shutil.rmtree(context.migr_dir)
except Exception:
pass
def after_all(context):
try:
context.last_migrate_res = {}
context.callbacks = []
context.migrate_config = {}
shutil.rmtree(context.migr_dir)
except Exception:
pass

View File

@ -0,0 +1,135 @@
Feature: Handling migration errors
Scenario: Conflicting migration versions
Given migration dir
And migrations
| file | code |
| V1__Single_migration.sql | SELECT 1; |
| V1__Another_migration.sql | SELECT 1; |
Then versions conflict with version=1
Scenario: Migration with bad sql
Given migration dir
And migrations
| file | code |
| V1__Single_migration.sql | THIS_IS_ERROR |
And database and connection
When we run pgmigrate with "-t 1 migrate"
Then pgmigrate command "failed"
And database has no schema_version table
And migrate command failed with THIS_IS_ERROR
Scenario: Migration without target
Given migration dir
And database and connection
When we run pgmigrate with "migrate"
Then pgmigrate command "failed"
And database has no schema_version table
And migrate command failed with Unknown target
Scenario: Wrong schema_version structure
Given migration dir
And database and connection
And query "CREATE TABLE public.schema_version (bla text, blabla text);"
When we run pgmigrate with "-t 1 migrate"
Then pgmigrate command "failed"
And migrate command failed with unexpected structure
Scenario: Migration with non-ascii symbols
Given migration dir
And migrations
| file | code |
| V1__Single_migration.sql | |
And database and connection
When we run pgmigrate with "-t 1 migrate"
Then pgmigrate command "failed"
And database has no schema_version table
And migrate command failed with Non ascii symbols in file
Scenario: Mix of transactional and nontransactional migrations
Given migration dir
And migrations
| file | code |
| V1__Transactional_migration.sql | SELECT 1; |
| V2__NONTRANSACTIONAL_migration.sql | SELECT 1; |
| V3__Transactional_migration.sql | SELECT 1; |
And database and connection
And successful pgmigrate run with "-t 1 migrate"
When we run pgmigrate with "-t 3 migrate"
Then pgmigrate command "failed"
And database contains schema_version
And migrate command failed with Unable to mix
Scenario: Baseline on applied version
Given migration dir
And migrations
| file | code |
| V1__Single_migration.sql | SELECT 1; |
And database and connection
And successful pgmigrate run with "-t 1 migrate"
When we run pgmigrate with "-b 1 baseline"
Then pgmigrate command "failed"
And database contains schema_version
And migrate command failed with already applied
Scenario: Invalid callback types
Given migration dir
And database and connection
When we run pgmigrate with "-a INVALID -t 1 migrate"
Then pgmigrate command "failed"
And database has no schema_version table
And migrate command failed with Unexpected callback type
Scenario: Invalid callback types from config
Given migration dir
And database and connection
And config callbacks
| type | file | code |
| INVALID | callback.sql | SELECT 1; |
When we run pgmigrate with "-t 1 migrate"
Then pgmigrate command "failed"
And database has no schema_version table
And migrate command failed with Unexpected callback type
Scenario: Missing callback files
Given migration dir
And database and connection
When we run pgmigrate with "-a afterAll:missing.sql -t 1 migrate"
Then pgmigrate command "failed"
And database has no schema_version table
And migrate command failed with Path unavailable
Scenario: Missing callback files from config
Given migration dir
And database and connection
And config callbacks
| type | file |
| afterAll | callback.sql |
When we run pgmigrate with "-t 1 migrate"
Then pgmigrate command "failed"
And database has no schema_version table
And migrate command failed with Path unavailable
Scenario: Dry run for nontransactional migrations
Given migration dir
And migrations
| file | code |
| V1__Transactional_migration.sql | SELECT 1; |
| V2__NONTRANSACTIONAL_migration.sql | SELECT 1; |
And database and connection
And successful pgmigrate run with "-t 1 migrate"
When we run pgmigrate with "-n -t 2 migrate"
Then pgmigrate command "failed"
And database contains schema_version
And migrate command failed with is nonsence
Scenario: Nontransactional migration on empty database
Given migration dir
And migrations
| file | code |
| V1__NONTRANSACTIONAL_migration.sql | SELECT 1; |
And database and connection
When we run pgmigrate with "-t 1 migrate"
Then pgmigrate command "failed"
And migrate command failed with First migration MUST be transactional
And database has no schema_version table

11
features/info.feature Normal file
View File

@ -0,0 +1,11 @@
Feature: Info
Scenario: Info prints applied migration
Given migration dir
And migrations
| file | code |
| V1__Single_migration.sql | SELECT 1; |
And database and connection
And successful pgmigrate run with "-t 1 migrate"
When we run pgmigrate with "info"
Then migrate command passed with Single migration

View File

@ -0,0 +1,74 @@
Feature: Getting migrations from dir
Scenario: Empty dir gives empty migrations list
Given migration dir
Then migration list is empty
Scenario: One migration in dir gives migration list with only this migration
Given migration dir
And migrations
| file | code |
| V1__Single_migration.sql | SELECT 1; |
Then migration list equals single transactional migration
Scenario: Garbage migrations are properly ignored
Given migration dir
And migrations
| file | code |
| V1__Single_migration.sql | SELECT 1; |
| kekekeke.sql | SELECT 1; |
And migration dir "V2__Dir_migration.sql"
Then migration list equals single transactional migration
Scenario: One migration in dir applies after migrate command
Given migration dir
And migrations
| file | code |
| V1__Single_migration.sql | SELECT 1; |
And database and connection
When we run pgmigrate with "-t 1 migrate"
Then pgmigrate command "succeeded"
And database contains schema_version
And migration info contains single migration
Scenario: Callbacks are executed in correct order
Given migration dir
And migrations
| file | code |
| V1__Single_migration.sql | INSERT INTO mycooltable (op) values ('Migration 1'); |
| V2__Another_migration.sql | INSERT INTO mycooltable (op) values ('Migration 2'); |
And callbacks
| type | file | code |
| beforeAll | before_all.sql | CREATE TABLE mycooltable (seq SERIAL PRIMARY KEY, op TEXT); |
| beforeEach | before_each.sql | INSERT INTO mycooltable (op) values ('Before each'); |
| afterEach | after_each.sql | INSERT INTO mycooltable (op) values ('After each'); |
| afterAll | after_all.sql | INSERT INTO mycooltable (op) values ('After all'); |
And database and connection
When we run pgmigrate with our callbacks and "-t 2 migrate"
Then pgmigrate command "succeeded"
And database contains schema_version
And query "SELECT * from mycooltable order by seq;" equals
| seq | op |
| 1 | Before each |
| 2 | Migration 1 |
| 3 | After each |
| 4 | Before each |
| 5 | Migration 2 |
| 6 | After each |
| 7 | After all |
Scenario: Callbacks are executed from dir
Given migration dir
And migrations
| file | code |
| V1__Single_migration.sql | CREATE TABLE mycooltable (seq SERIAL PRIMARY KEY, op TEXT); |
And callbacks
| type | file | code |
| afterAll | after_all.sql | INSERT INTO mycooltable (op) values ('After all'); |
And database and connection
When we run pgmigrate with dir callbacks and type "afterAll" and "-t 2 migrate"
Then pgmigrate command "succeeded"
And database contains schema_version
And query "SELECT * from mycooltable order by seq;" equals
| seq | op |
| 1 | After all |

11
features/modeline.feature Normal file
View File

@ -0,0 +1,11 @@
Feature: Modelines in migration/callback files
Scenario: Migration with non-ascii symbols and modeline
Given migration dir
And migrations
| file | code |
| V1__Single_migration.sql | /* pgmigrate-encoding: utf-8 */SELECT ''; |
And database and connection
When we run pgmigrate with "-t 1 migrate"
Then pgmigrate command "succeeded"
And database contains schema_version

View File

@ -0,0 +1,80 @@
Feature: Nontransactional migrations support
Scenario: Callbacks are not executed on nontransactional migration
Given migration dir
And migrations
| file | code |
| V1__Transactional_migration.sql | INSERT INTO mycooltable (op) values ('Migration 1'); |
| V2__NONTRANSACTIONAL_migration.sql | INSERT INTO mycooltable (op) values ('Migration 2'); |
And callbacks
| type | file | code |
| beforeAll | before_all.sql | CREATE TABLE mycooltable (seq SERIAL PRIMARY KEY, op TEXT); |
| beforeEach | before_each.sql | INSERT INTO mycooltable (op) values ('Before each'); |
| afterEach | after_each.sql | INSERT INTO mycooltable (op) values ('After each'); |
| afterAll | after_all.sql | INSERT INTO mycooltable (op) values ('After all'); |
And database and connection
And successful pgmigrate run with our callbacks and "-t 1 migrate"
When we run pgmigrate with our callbacks and "-t 2 migrate"
Then pgmigrate command "succeeded"
And database contains schema_version
And query "SELECT * from mycooltable order by seq;" equals
| seq | op |
| 1 | Before each |
| 2 | Migration 1 |
| 3 | After each |
| 4 | After all |
| 5 | Migration 2 |
Scenario: Callbacks are executed on nontransactional migration on empty database in correct order 1
Given migration dir
And migrations
| file | code |
| V1__Transactional_migration.sql | INSERT INTO mycooltable (op) values ('Migration 1'); |
| V2__NONTRANSACTIONAL_migration.sql | INSERT INTO mycooltable (op) values ('Migration 2'); |
And callbacks
| type | file | code |
| beforeAll | before_all.sql | CREATE TABLE mycooltable (seq SERIAL PRIMARY KEY, op TEXT); |
| beforeEach | before_each.sql | INSERT INTO mycooltable (op) values ('Before each'); |
| afterEach | after_each.sql | INSERT INTO mycooltable (op) values ('After each'); |
| afterAll | after_all.sql | INSERT INTO mycooltable (op) values ('After all'); |
And database and connection
When we run pgmigrate with our callbacks and "-t 2 migrate"
Then pgmigrate command "succeeded"
And migrate command passed with Migrating to version 2
And database contains schema_version
And query "SELECT * from mycooltable order by seq;" equals
| seq | op |
| 1 | Before each |
| 2 | Migration 1 |
| 3 | After each |
| 4 | After all |
| 5 | Migration 2 |
Scenario: Callbacks are executed on nontransactional migration on empty database in correct order 2
Given migration dir
And migrations
| file | code |
| V1__Transactional_migration.sql | INSERT INTO mycooltable (op) values ('Migration 1'); |
| V2__NONTRANSACTIONAL_migration.sql | INSERT INTO mycooltable (op) values ('Migration 2'); |
| V3__Transactional_migration.sql | INSERT INTO mycooltable (op) values ('Migration 3'); |
And callbacks
| type | file | code |
| beforeAll | before_all.sql | CREATE TABLE mycooltable (seq SERIAL PRIMARY KEY, op TEXT); |
| beforeEach | before_each.sql | INSERT INTO mycooltable (op) values ('Before each'); |
| afterEach | after_each.sql | INSERT INTO mycooltable (op) values ('After each'); |
| afterAll | after_all.sql | INSERT INTO mycooltable (op) values ('After all'); |
And database and connection
When we run pgmigrate with our callbacks and "-t 3 migrate"
Then pgmigrate command "succeeded"
And migrate command passed with Migrating to version 3
And database contains schema_version
And query "SELECT * from mycooltable order by seq;" equals
| seq | op |
| 1 | Before each |
| 2 | Migration 1 |
| 3 | After each |
| 4 | Migration 2 |
| 5 | Before each |
| 6 | Migration 3 |
| 7 | After each |
| 8 | After all |

View File

@ -0,0 +1,57 @@
import os
from behave import given, when
@given('callbacks')
def step_impl(context):
    """Register 'type:path' callback specs, writing files that carry code."""
    for row in context.table:
        rel_path = os.path.join('callbacks', row['file'])
        context.callbacks.append('%s:%s' % (row['type'], rel_path))
        if row.get('code', False):
            with open(os.path.join(context.migr_dir, rel_path), 'w') as out:
                out.write(row['code'])
@given('config callbacks')  # noqa
def step_impl(context):
    """Record callbacks in context.migrate_config (later dumped to migrations.yml).

    Table columns: type, file, optional dir, optional code.  With 'dir'
    the config entry points at the directory; otherwise at the file.
    """
    for row in context.table:
        if row.get('dir', False):
            # Directory-style callback: config references the dir itself.
            dir_path = os.path.join('callbacks', row['dir'])
            path = os.path.join(dir_path, row['file'])
        else:
            dir_path = None
            path = os.path.join('callbacks', row['file'])
        if 'callbacks' not in context.migrate_config:
            context.migrate_config['callbacks'] = {}
        if row['type'] not in context.migrate_config['callbacks']:
            context.migrate_config['callbacks'][row['type']] = []
        if dir_path:
            context.migrate_config['callbacks'][row['type']].append(dir_path)
            if not os.path.exists(os.path.join(context.migr_dir, dir_path)):
                os.mkdir(os.path.join(context.migr_dir, dir_path))
        else:
            context.migrate_config['callbacks'][row['type']].append(path)
        # Only write the callback file when the table supplies code
        # (a missing file is used to test error handling).
        if row.get('code', False):
            with open(os.path.join(context.migr_dir, path), 'w') as f:
                f.write(row['code'])
@given('successful pgmigrate run with our callbacks and "{args}"')  # noqa
def step_impl(context, args):
    """Delegate to the plain 'successful run' step, prepending -a callbacks."""
    full_args = '-a ' + ','.join(context.callbacks) + ' ' + args
    context.execute_steps(
        'given successful pgmigrate run with "%s"' % (full_args,))
@when('we run pgmigrate with our callbacks and "{args}"')  # noqa
def step_impl(context, args):
    """Delegate to the plain run step, prepending -a callbacks."""
    full_args = '-a ' + ','.join(context.callbacks) + ' ' + args
    context.execute_steps(
        'when we run pgmigrate with "%s"' % (full_args,))
@when('we run pgmigrate with dir callbacks and type "{cb_type}" and "{args}"')  # noqa
def step_impl(context, cb_type, args):
    """Run pgmigrate with -a pointing at the whole callbacks directory."""
    spec = '-a ' + cb_type + ':' + context.migr_dir + '/callbacks/ ' + args
    context.execute_steps('when we run pgmigrate with "%s"' % (spec,))

View File

@ -0,0 +1,8 @@
from behave import then
from pgmigrate import _is_initialized
@then("database contains schema_version")
def step_impl(context):
cur = context.conn.cursor()
assert _is_initialized(cur), 'Non-empty db should be initialized'

View File

@ -0,0 +1,8 @@
from behave import then
from pgmigrate import _is_initialized
@then("database has no schema_version table")
def step_impl(context):
cur = context.conn.cursor()
assert not _is_initialized(cur), 'Database should be uninitialized'

View File

@ -0,0 +1,12 @@
from behave import then
from pgmigrate import MalformedMigration, _get_migrations_info_from_dir
@then('versions conflict with version={version}')
def step_impl(context, version):
    """Expect a MalformedMigration naming the duplicated version."""
    try:
        _get_migrations_info_from_dir(context.migr_dir)
    except MalformedMigration as exc:
        expected = 'migrations with same version: ' + str(version)
        assert expected in str(exc)
        return
    raise RuntimeError('No failure on version conflict')

View File

@ -0,0 +1,16 @@
import psycopg2
from behave import given
@given('database and connection')
def step_impl(context):
    """(Re)create the pgmigratetest database and open a connection to it."""
    context.conn = None
    conn = psycopg2.connect('dbname=postgres')
    conn.autocommit = True
    cur = conn.cursor()
    # Kill lingering sessions so DROP DATABASE cannot fail with "in use".
    cur.execute("select pg_terminate_backend(pid) " +
                "from pg_stat_activity where datname='pgmigratetest'")
    cur.execute('drop database if exists pgmigratetest')
    cur.execute('create database pgmigratetest')
    context.conn = psycopg2.connect('dbname=pgmigratetest')

View File

@ -0,0 +1,20 @@
import io
import os
from behave import given
@given('migrations')  # noqa
def step_impl(context):
    """Write each table row as a migration file under <migr_dir>/migrations."""
    base = os.path.join(context.migr_dir, 'migrations')
    for row in context.table:
        target = os.path.join(base, row['file'])
        with io.open(target, 'w', encoding='utf-8') as out:
            out.write(row['code'])
@given('migration dir "{dirname}"')  # noqa
def step_impl(context, dirname):
    """Create a subdirectory inside the migrations dir (garbage to ignore)."""
    os.mkdir(os.path.join(context.migr_dir, 'migrations', dirname))

View File

@ -0,0 +1,16 @@
import os
import shutil
import tempfile
from behave import given
@given('migration dir')
def step_impl(context):
    """Create a fresh temp dir with 'migrations' and 'callbacks' subdirs."""
    try:
        shutil.rmtree(context.migr_dir)
    except Exception:
        # No previous dir to remove; ignore.
        pass
    context.migr_dir = tempfile.mkdtemp()
    for sub in ('migrations', 'callbacks'):
        os.mkdir(os.path.join(context.migr_dir, sub))

View File

@ -0,0 +1,11 @@
from behave import then
@then('migrate command failed with {error}')
def step_impl(context, error):
    """Assert the last pgmigrate run failed and its stderr mentions error.

    Normalizes stderr to text first: the original compared the pattern
    against raw bytes (and concatenated str + bytes in the message),
    which breaks on Python 3.
    """
    err = context.last_migrate_res['err']
    if isinstance(err, bytes):
        err = err.decode('utf-8', 'ignore')
    assert context.last_migrate_res['ret'] != 0, \
        'Not failed with: ' + err
    assert error in err, \
        'Actual result: ' + err

View File

@ -0,0 +1,10 @@
from behave import then
from pgmigrate import _get_info
@then("migration info contains forced baseline={baseline}")
def step_impl(context, baseline):
cur = context.conn.cursor()
info = _get_info(context.migr_dir, 0, 1, cur)
assert list(info.values())[0]['version'] == int(baseline)
assert list(info.values())[0]['description'] == 'Forced baseline'

View File

@ -0,0 +1,10 @@
from behave import then
from pgmigrate import _get_info
@then("migration info contains single migration")
def step_impl(context):
cur = context.conn.cursor()
info = _get_info(context.migr_dir, 0, 1, cur)
assert list(info.values())[0]['version'] == 1
assert list(info.values())[0]['description'] == 'Single migration'

View File

@ -0,0 +1,7 @@
from behave import then
from pgmigrate import _get_migrations_info_from_dir
@then('migration list is empty')
def step_impl(context):
    """An empty migrations dir must yield no migrations."""
    assert not _get_migrations_info_from_dir(context.migr_dir)

View File

@ -0,0 +1,11 @@
from behave import then
from pgmigrate import _get_migrations_info_from_dir
@then('migration list equals single transactional migration')
def step_impl(context):
    """Exactly one migration: version 1, description 'Single migration'."""
    found = _get_migrations_info_from_dir(context.migr_dir)
    assert len(found) == 1
    migration = list(found.values())[0]
    assert migration.meta['version'] == 1
    assert migration.meta['description'] == 'Single migration'

View File

@ -0,0 +1,9 @@
from behave import then
@then('migrate command passed with {message}')
def step_impl(context, message):
    """Assert the last pgmigrate run succeeded and stderr contains message.

    Normalizes stderr to text: if 'err' is bytes (raw pipe output),
    'message in err' and 'str + err' both fail on Python 3.
    """
    err = context.last_migrate_res['err']
    if isinstance(err, bytes):
        err = err.decode('utf-8', 'ignore')
    assert context.last_migrate_res['ret'] == 0, \
        'Failed with: ' + err
    assert message in err, \
        'Actual result: ' + err

21
features/steps/query.py Normal file
View File

@ -0,0 +1,21 @@
from behave import given, then
@given('query "{query}"')  # noqa
def step_impl(context, query):
    """Run an arbitrary SQL statement and commit it immediately."""
    cursor = context.conn.cursor()
    for statement in (query, 'commit;'):
        cursor.execute(statement)
@then('query "{query}" equals')  # noqa
def step_impl(context, query):
    """Compare query output against the expected table of seq|op pairs."""
    cursor = context.conn.cursor()
    cursor.execute(query)
    formatted = ';'.join('|'.join(str(col) for col in row)
                         for row in cursor.fetchall())
    result = ';'.join(row['seq'] + '|' + row['op']
                      for row in context.table)
    assert formatted == result, 'Unexpected result: ' + formatted

View File

@ -0,0 +1,59 @@
import os
import subprocess
import sys
import yaml
from behave import given, then, when
def run_pgmigrate(migr_dir, args):
    """Run pgmigrate under coverage against the test db; return (rc, out, err)."""
    cmd = ['coverage', 'run', '-p', '--include=pgmigrate.py',
           './pgmigrate.py', '-vvv', '-d', migr_dir,
           '-c', 'dbname=pgmigratetest']
    cmd += str(args).split()
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    out, err = proc.communicate()
    return proc.returncode, str(out), str(err)
@given('successful pgmigrate run with "{args}"')
def step_impl(context, args):
    """Run pgmigrate and require a zero exit code."""
    if context.migrate_config:
        # Persist config callbacks so pgmigrate picks them up.
        config_path = os.path.join(context.migr_dir, 'migrations.yml')
        with open(config_path, 'w') as out:
            out.write(yaml.dump(context.migrate_config))
    ret, stdout, stderr = run_pgmigrate(context.migr_dir, args)
    if ret != 0:
        sys.stdout.write(stdout)
        sys.stderr.write(stderr)
        raise Exception('Expected success got retcode=%d' % ret)
@when('we run pgmigrate with "{args}"')  # noqa
def step_impl(context, args):
    """Run pgmigrate and stash the result for later Then-steps."""
    if context.migrate_config:
        config_path = os.path.join(context.migr_dir, 'migrations.yml')
        with open(config_path, 'w') as out:
            out.write(yaml.dump(context.migrate_config))
    ret, stdout, stderr = run_pgmigrate(context.migr_dir, args)
    context.last_migrate_res = {'ret': ret, 'out': stdout, 'err': stderr}
@then('pgmigrate command "{result}"')  # noqa
def step_impl(context, result):
    """Check the stored pgmigrate result against "failed"/"succeeded"."""
    res = context.last_migrate_res
    if not res:
        raise Exception('No pgmigrate run detected in current context')
    if result not in ['failed', 'succeeded']:
        raise Exception('Incorrect step arguments')
    failed = res['ret'] != 0
    if (result == 'failed') != failed:
        # Dump captured output to aid debugging before failing the step.
        sys.stdout.write(str(res['out']))
        sys.stderr.write(str(res['err']))
        if result == 'failed':
            raise Exception('Expected failure got success')
        raise Exception('Expected success got retcode='
                        '%d' % res['ret'])

666
pgmigrate.py Executable file
View File

@ -0,0 +1,666 @@
#!/usr/bin/env python
'''
PGmigrate - PostgreSQL migrations made easy
'''
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Yandex LLC <https://github.com/yandex>
# Copyright (c) 2016 Other contributors as noted in the AUTHORS file.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose, without fee, and without a written
# agreement is hereby granted, provided that the above copyright notice
# and this paragraph and the following two paragraphs appear in all copies.
#
# IN NO EVENT SHALL YANDEX LLC BE LIABLE TO ANY PARTY FOR DIRECT,
# INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING LOST
# PROFITS, ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION,
# EVEN IF YANDEX LLC HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# YANDEX SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS ON AN "AS IS"
# BASIS, AND YANDEX LLC HAS NO OBLIGATIONS TO PROVIDE MAINTENANCE,
# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
from __future__ import absolute_import, print_function, unicode_literals
import argparse
import codecs
import json
import logging
import os
import re
import sys
from builtins import str as text
from collections import namedtuple
import psycopg2
import sqlparse
import yaml
from psycopg2.extras import LoggingConnection
LOG = logging.getLogger(__name__)
class MigrateError(RuntimeError):
    '''Base class for all pgmigrate errors.'''


class MalformedStatement(MigrateError):
    '''Raised for statements pgmigrate cannot process (e.g. bad encoding).'''


class MalformedMigration(MigrateError):
    '''Raised for invalid migration files (e.g. duplicate versions).'''


class MalformedSchema(MigrateError):
    '''Raised when table schema_version has an unexpected structure.'''


class ConfigParseError(MigrateError):
    '''Raised for invalid config files or command-line arguments.'''


class BaselineError(MigrateError):
    '''Raised when a baseline cannot be applied.'''
# Expected column layout of public.schema_version; _is_initialized and
# _get_info depend on this exact order.
REF_COLUMNS = ['version', 'description', 'type',
               'installed_by', 'installed_on']
def _create_connection(conn_string):
    '''
    Create a psycopg2 connection that logs all statements to LOG
    '''
    conn = psycopg2.connect(conn_string, connection_factory=LoggingConnection)
    conn.initialize(LOG)
    return conn
def _is_initialized(cursor):
    '''
    Check that database is initialized

    Returns True when public.schema_version exists, False when it does
    not, and raises MalformedSchema when it exists with columns other
    than REF_COLUMNS.
    '''
    query = cursor.mogrify('SELECT EXISTS(SELECT 1 FROM '
                           'information_schema.tables '
                           'WHERE table_schema = %s '
                           'AND table_name = %s);',
                           ('public', 'schema_version'))
    cursor.execute(query)
    table_exists = cursor.fetchone()[0]
    if not table_exists:
        return False
    # Table exists: validate its column layout against REF_COLUMNS.
    cursor.execute('SELECT * from public.schema_version limit 1;')
    colnames = [desc[0] for desc in cursor.description]
    if colnames != REF_COLUMNS:
        raise MalformedSchema('Table schema_version has unexpected '
                              'structure: %s' % '|'.join(colnames))
    return True
MIGRATION_FILE_RE = re.compile(
r'V(?P<version>\d+)__(?P<description>.+)\.sql$'
)
MigrationInfo = namedtuple('MigrationInfo', ('meta', 'filePath'))
Callbacks = namedtuple('Callbacks', ('beforeAll', 'beforeEach',
'afterEach', 'afterAll'))
Config = namedtuple('Config', ('target', 'baseline', 'cursor', 'dryrun',
'callbacks', 'base_dir', 'conn',
'conn_instance'))
CONFIG_IGNORE = ['cursor', 'conn_instance']
def _get_migrations_info_from_dir(base_dir):
'''
Get all migrations from base dir
'''
path = os.path.join(base_dir, 'migrations')
migrations = {}
if os.path.exists(path) and os.path.isdir(path):
for fname in os.listdir(path):
file_path = os.path.join(path, fname)
if not os.path.isfile(file_path):
continue
match = MIGRATION_FILE_RE.match(fname)
if match is None:
continue
version = int(match.group('version'))
ret = dict(
version=version,
type='auto',
installed_by=None,
installed_on=None,
description=match.group('description').replace('_', ' ')
)
ret['transactional'] = 'NONTRANSACTIONAL' not in ret['description']
migration = MigrationInfo(
ret,
file_path
)
if version in migrations:
raise MalformedMigration(
'Found migrations with same version: %d ' % version +
'\nfirst : %s' % migration.filePath +
'\nsecond: %s' % migrations[version].filePath)
migrations[version] = migration
return migrations
def _get_migrations_info(base_dir, baseline_v, target_v):
    '''
    Get migration metadata for versions in (baseline_v, target_v]
    '''
    migrations = {}
    for version, info in _get_migrations_info_from_dir(base_dir).items():
        if baseline_v < version <= target_v:
            migrations[version] = info.meta
        else:
            LOG.info(
                'Ignore migration %r cause baseline: %r or target: %r',
                info, baseline_v, target_v
            )
    return migrations
def _get_info(base_dir, baseline_v, target_v, cursor):
    '''
    Get migrations info from database and base dir

    Merges rows already recorded in public.schema_version with
    not-yet-applied migrations found on disk; everything at or below the
    highest applied version is treated as the effective baseline.
    '''
    ret = {}
    cursor.execute('SELECT ' + ', '.join(REF_COLUMNS) +
                   ' from public.schema_version;')
    for i in cursor.fetchall():
        version = {}
        for j in enumerate(REF_COLUMNS):
            if j[1] == 'installed_on':
                # Render the timestamp for human-readable info output.
                version[j[1]] = i[j[0]].strftime('%F %H:%M:%S')
            else:
                version[j[1]] = i[j[0]]
        version['version'] = int(version['version'])
        transactional = 'NONTRANSACTIONAL' not in version['description']
        version['transactional'] = transactional
        ret[version['version']] = version
    # Guard against an initialized-but-empty schema_version table:
    # sorted(ret.keys())[-1] raised IndexError on an empty dict.
    if ret:
        baseline_v = max(baseline_v, max(ret))
    migrations_info = _get_migrations_info(base_dir, baseline_v, target_v)
    for version in migrations_info:
        num = migrations_info[version]['version']
        if num not in ret:
            ret[num] = migrations_info[version]
    return ret
def _get_state(base_dir, baseline_v, target, cursor):
    '''
    Info wrapper that also works on a noninitialized database
    '''
    if not _is_initialized(cursor):
        return _get_migrations_info(base_dir, baseline_v, target)
    return _get_info(base_dir, baseline_v, target, cursor)
def _set_baseline(baseline_v, cursor):
    '''
    Cleanup schema_version and set baseline

    Raises BaselineError when any version >= baseline_v is already
    recorded; otherwise wipes schema_version and inserts a single
    'Forced baseline' row of type 'manual'.
    '''
    query = cursor.mogrify('SELECT EXISTS(SELECT 1 FROM public'
                           '.schema_version WHERE version >= %s::bigint);',
                           (baseline_v,))
    cursor.execute(query)
    check_failed = cursor.fetchone()[0]
    if check_failed:
        raise BaselineError('Unable to baseline, version '
                            '%s already applied' % text(baseline_v))
    LOG.info('cleaning up table schema_version')
    cursor.execute('DELETE FROM public.schema_version;')
    LOG.info(cursor.statusmessage)
    LOG.info('setting baseline')
    # type='manual' distinguishes baselines from applied migrations.
    query = cursor.mogrify('INSERT INTO public.schema_version '
                           '(version, type, description, installed_by) '
                           'VALUES (%s::bigint, %s, %s, CURRENT_USER);',
                           (text(baseline_v), 'manual', 'Forced baseline'))
    cursor.execute(query)
    LOG.info(cursor.statusmessage)
def _init_schema(cursor):
    '''
    Create schema_version table

    Also creates the schema_version_type enum ('auto'/'manual') that the
    table's type column uses.
    '''
    LOG.info('creating type schema_version_type')
    query = cursor.mogrify('CREATE TYPE public.schema_version_type '
                           'AS ENUM (%s, %s);', ('auto', 'manual'))
    cursor.execute(query)
    LOG.info(cursor.statusmessage)
    LOG.info('creating table schema_version')
    query = cursor.mogrify('CREATE TABLE public.schema_version ('
                           'version BIGINT NOT NULL PRIMARY KEY, '
                           'description TEXT NOT NULL, '
                           'type public.schema_version_type NOT NULL '
                           'DEFAULT %s, '
                           'installed_by TEXT NOT NULL, '
                           'installed_on TIMESTAMP WITHOUT time ZONE '
                           'DEFAULT now() NOT NULL);', ('auto',))
    cursor.execute(query)
    LOG.info(cursor.statusmessage)
def _get_statements(path):
    '''
    Get statements from file

    Yields each parsed SQL statement as a utf-8 encoded byte string.
    Files must be pure ascii unless they carry the
    '/* pgmigrate-encoding: utf-8 */' modeline.
    '''
    with codecs.open(path, encoding='utf-8') as i:
        data = i.read()
    if u'/* pgmigrate-encoding: utf-8 */' not in data:
        try:
            data.encode('ascii')
        except UnicodeError as exc:
            raise MalformedStatement(
                'Non ascii symbols in file: {0}, {1}'.format(
                    path, text(exc)))
    for statement in sqlparse.parsestream(data, encoding='utf-8'):
        st_str = text(statement).strip().encode('utf-8')
        if st_str:
            yield st_str
def _apply_statement(statement, cursor):
    '''
    Execute statement using cursor

    Logs the failing statement line by line and raises MigrateError on
    any psycopg2 error.
    '''
    try:
        cursor.execute(statement, 'utf-8')
    except psycopg2.Error as exc:
        LOG.error('Error executing statement:')
        for line in statement.splitlines():
            LOG.error(line)
        LOG.error(exc)
        raise MigrateError('Unable to apply statement')
def _apply_file(file_path, cursor):
    '''
    Execute every statement found in the given file
    '''
    try:
        for statement in _get_statements(file_path):
            _apply_statement(statement, cursor)
    except MalformedStatement as exc:
        # Log the parse problem, then propagate it unchanged.
        LOG.error(exc)
        raise
def _apply_version(version, base_dir, cursor):
    '''
    Execute all statements in migration version

    Applies the migration file, then records the version in
    public.schema_version.
    '''
    all_versions = _get_migrations_info_from_dir(base_dir)
    version_info = all_versions[version]
    LOG.info('Try apply version %r', version_info)
    _apply_file(version_info.filePath, cursor)
    query = cursor.mogrify('INSERT INTO public.schema_version '
                           '(version, description, installed_by) '
                           'VALUES (%s::bigint, %s, CURRENT_USER)',
                           (text(version),
                            version_info.meta['description']))
    cursor.execute(query)
def _parse_str_callbacks(callbacks, ret, base_dir):
    '''
    Parse comma-separated 'type:path' callback specs into ret (Callbacks).

    Raises ConfigParseError for unknown callback types, malformed specs
    and nonexistent paths.  Directories expand to their files in sorted
    order.
    '''
    for callback in callbacks.split(','):
        if not callback:
            continue
        # Split on the first ':' only, so paths containing ':' survive;
        # the original split() silently truncated such paths, and a spec
        # without any ':' crashed with IndexError instead of a clear error.
        tokens = callback.split(':', 1)
        if tokens[0] not in ret._fields:
            raise ConfigParseError('Unexpected callback '
                                   'type: %s' % text(tokens[0]))
        if len(tokens) != 2 or not tokens[1]:
            raise ConfigParseError('Missing path in callback: '
                                   '%s' % text(callback))
        path = os.path.join(base_dir, tokens[1])
        if not os.path.exists(path):
            raise ConfigParseError('Path unavailable: %s' % text(path))
        if os.path.isdir(path):
            for fname in sorted(os.listdir(path)):
                getattr(ret, tokens[0]).append(os.path.join(path, fname))
        else:
            getattr(ret, tokens[0]).append(path)
    return ret
def _parse_dict_callbacks(callbacks, ret, base_dir):
    '''
    Merge a {type: [path, ...]} callback mapping into ret (Callbacks)
    '''
    for cb_type in callbacks:
        if cb_type not in ret._fields:
            raise ConfigParseError('Unexpected callback '
                                   'type: %s' % text(cb_type))
        for item in callbacks[cb_type]:
            path = os.path.join(base_dir, item)
            if not os.path.exists(path):
                raise ConfigParseError('Path unavailable: %s' % text(path))
            if os.path.isdir(path):
                # Directories expand to their files in sorted order.
                for fname in sorted(os.listdir(path)):
                    getattr(ret, cb_type).append(os.path.join(path, fname))
            else:
                getattr(ret, cb_type).append(path)
    return ret
def _get_callbacks(callbacks, base_dir=''):
    '''
    Parse cmdline/config callbacks
    '''
    empty = Callbacks(beforeAll=[],
                      beforeEach=[],
                      afterEach=[],
                      afterAll=[])
    parser = (_parse_dict_callbacks if isinstance(callbacks, dict)
              else _parse_str_callbacks)
    return parser(callbacks, empty, base_dir)
def _migrate_step(state, callbacks, base_dir, cursor):
    '''
    Apply one version with callbacks

    Walks state in version order and applies every migration whose
    installed_on is None, wrapping the run in beforeAll/afterAll and each
    migration in beforeEach/afterEach callbacks.
    '''
    before_all_executed = False
    should_migrate = False
    # Migrations may need long locks; disable lock_timeout for the session.
    cursor.execute('SET lock_timeout = 0;')
    if not _is_initialized(cursor):
        LOG.info('schema not initialized')
        _init_schema(cursor)
    for version in sorted(state.keys()):
        LOG.debug('has version %r', version)
        if state[version]['installed_on'] is None:
            should_migrate = True
            # beforeAll runs once, before the first migration applied.
            if not before_all_executed and callbacks.beforeAll:
                LOG.info('Executing beforeAll callbacks:')
                for callback in callbacks.beforeAll:
                    _apply_file(callback, cursor)
                    LOG.info(callback)
                before_all_executed = True
            LOG.info('Migrating to version %d', version)
            if callbacks.beforeEach:
                LOG.info('Executing beforeEach callbacks:')
                for callback in callbacks.beforeEach:
                    LOG.info(callback)
                    _apply_file(callback, cursor)
            _apply_version(version, base_dir, cursor)
            if callbacks.afterEach:
                LOG.info('Executing afterEach callbacks:')
                for callback in callbacks.afterEach:
                    LOG.info(callback)
                    _apply_file(callback, cursor)
    # afterAll runs once, only when at least one migration was applied.
    if should_migrate and callbacks.afterAll:
        LOG.info('Executing afterAll callbacks:')
        for callback in callbacks.afterAll:
            LOG.info(callback)
            _apply_file(callback, cursor)
def _finish(config):
if config.dryrun:
config.cursor.execute('rollback')
else:
config.cursor.execute('commit')
def info(config, stdout=True):
    '''
    Info cmdline wrapper
    '''
    state = _get_state(config.base_dir, config.baseline,
                       config.target, config.cursor)
    if stdout:
        dumped = json.dumps(state, indent=4, separators=(',', ': '))
        sys.stdout.write(dumped + '\n')
    _finish(config)
    return state
def clean(config):
    '''
    Drop schema_version table

    Also drops the schema_version_type enum; no-op when the database is
    not initialized.  Finishes with commit (or rollback in dryrun mode).
    '''
    if _is_initialized(config.cursor):
        LOG.info('dropping schema_version')
        config.cursor.execute('DROP TABLE public.schema_version;')
        LOG.info(config.cursor.statusmessage)
        LOG.info('dropping schema_version_type')
        config.cursor.execute('DROP TYPE public.schema_version_type;')
        LOG.info(config.cursor.statusmessage)
    _finish(config)
def baseline(config):
    '''
    Set baseline cmdline wrapper

    Initializes schema_version when needed, records the forced baseline,
    then commits (or rolls back in dryrun mode).
    '''
    if not _is_initialized(config.cursor):
        _init_schema(config.cursor)
    _set_baseline(config.baseline, config.cursor)
    _finish(config)
def _prepare_nontransactional_steps(state, callbacks):
    '''
    Partition an ordered migration state dict into execution steps.

    Consecutive transactional migrations are grouped into a single step
    sharing the user-supplied callbacks; every nontransactional migration
    becomes its own step with empty callbacks (``_get_callbacks('')``).
    Afterwards, afterAll/beforeAll callbacks are stripped at the borders
    around nontransactional steps so that the "all" callbacks fire only
    once around the whole plan.

    Raises MalformedMigration if the very first migration in the plan is
    nontransactional (there would be no transactional step to anchor it).

    NOTE(review): each step is {'state': {version: info}, 'cbs': callbacks};
    ``callbacks`` is presumably a namedtuple (it supports ``._replace``) —
    confirm against _get_callbacks.
    '''
    steps = []
    # ``i`` accumulates the current run of transactional migrations.
    i = {'state': {},
         'cbs': _get_callbacks('')}
    for version in sorted(state):
        if not state[version]['transactional']:
            if i['state']:
                # Flush the pending transactional run before the break.
                steps.append(i)
                i = {'state': {},
                     'cbs': _get_callbacks('')}
            elif len(steps) == 0:
                LOG.error('First migration MUST be transactional')
                raise MalformedMigration('First migration MUST '
                                         'be transactional')
            # Nontransactional migration runs alone, with no callbacks.
            steps.append({'state': {version: state[version]},
                          'cbs': _get_callbacks('')})
        else:
            i['state'][version] = state[version]
            i['cbs'] = callbacks
    # Flush the trailing transactional run, if any.
    if i['state']:
        steps.append(i)
    # Second pass: suppress duplicate beforeAll/afterAll around
    # nontransactional steps.
    prev_nontransactional = False
    for (num, step) in enumerate(steps):
        if not list(step['state'].values())[0]['transactional']:
            if num != len(steps) - 1:
                # Not the last step: the preceding transactional step must
                # not fire afterAll yet — more steps follow.
                steps[num-1]['cbs'] = steps[num-1]['cbs']._replace(afterAll=[])
            prev_nontransactional = True
        else:
            if prev_nontransactional:
                # Plan already started: do not re-run beforeAll here.
                steps[num]['cbs'] = steps[num]['cbs']._replace(beforeAll=[])
                prev_nontransactional = False
    LOG.info('Initialization plan result:\n %s',
             json.dumps(steps, indent=4, separators=(',', ': ')))
    return steps
def migrate(config):
    '''
    Migrate cmdline wrapper.

    Applies all pending migrations up to config.target. Transactional
    migrations run on config.cursor inside the session transaction;
    nontransactional ones run on a separate autocommit connection.
    Raises MigrateError when no target is given, when a dry run would
    include nontransactional migrations, or when transactional and
    nontransactional pending migrations cannot be sequenced safely.
    '''
    if config.target is None:
        LOG.error('Unknown target')
        raise MigrateError('Unknown target')
    state = _get_state(config.base_dir, config.baseline,
                       config.target, config.cursor)
    not_applied = [x for x in state if state[x]['installed_on'] is None]
    non_trans = [x for x in not_applied if not state[x]['transactional']]
    if len(non_trans) > 0:
        # Nontransactional migrations cannot be rolled back, so a dry run
        # is impossible.
        # NOTE(review): 'nonsence' is a typo for 'nonsense' — message text
        # kept byte-identical here; fix in a behavior-changing commit.
        if config.dryrun:
            LOG.error('Dry run for nontransactional migrations '
                      'is nonsence')
            raise MigrateError('Dry run for nontransactional migrations '
                               'is nonsence')
        if len(state) != len(not_applied):
            # Some migrations are already applied: only an all-
            # nontransactional tail is allowed in this case.
            if len(not_applied) != len(non_trans):
                LOG.error('Unable to mix transactional and '
                          'nontransactional migrations')
                raise MigrateError('Unable to mix transactional and '
                                   'nontransactional migrations')
            # Drop the session transaction and run everything on a fresh
            # autocommit connection with empty callbacks.
            config.cursor.execute('rollback;')
            nt_conn = _create_connection(config.conn)
            nt_conn.autocommit = True
            cursor = nt_conn.cursor()
            _migrate_step(state, _get_callbacks(''),
                          config.base_dir, cursor)
        else:
            # Fresh database: interleave transactional steps (on the
            # session cursor) with nontransactional ones (autocommit).
            steps = _prepare_nontransactional_steps(state, config.callbacks)
            nt_conn = _create_connection(config.conn)
            nt_conn.autocommit = True
            commit_req = False
            for step in steps:
                # Commit the previous transactional step before switching
                # to a nontransactional one.
                if commit_req:
                    config.cursor.execute('commit')
                    commit_req = False
                if not list(step['state'].values())[0]['transactional']:
                    cur = nt_conn.cursor()
                else:
                    cur = config.cursor
                    commit_req = True
                _migrate_step(step['state'], step['cbs'], config.base_dir, cur)
    else:
        # Purely transactional plan: single step inside the session
        # transaction; _finish() decides commit vs rollback.
        _migrate_step(state, config.callbacks, config.base_dir, config.cursor)
    _finish(config)
# Mapping of cmdline verb -> handler; dispatched from _main().
COMMANDS = {
    'info': info,
    'clean': clean,
    'baseline': baseline,
    'migrate': migrate,
}

# Default configuration values; overridden first by migrations.yml and
# then by cmdline arguments in get_config().
# NOTE(review): Config is defined earlier in the file — presumably a
# namedtuple, since get_config() uses ._fields and ._replace on it.
CONFIG_DEFAULTS = Config(target=None, baseline=0, cursor=None, dryrun=False,
                         callbacks='', base_dir='',
                         conn='dbname=postgres user=postgres '
                              'connect_timeout=1',
                         conn_instance=None)
def get_config(base_dir, args=None):
    '''
    Load configuration from migrations.yml in base_dir, layered with args.

    Precedence per field (lowest to highest): CONFIG_DEFAULTS, then the
    yml file, then a non-None cmdline argument. Fields listed in
    CONFIG_IGNORE are never taken from the file/args. Finally a database
    connection, cursor and parsed callbacks are attached to the config.

    Returns the fully populated Config namedtuple.
    '''
    path = os.path.join(base_dir, 'migrations.yml')
    try:
        with codecs.open(path, encoding='utf-8') as i:
            # safe_load: a config file must never construct arbitrary
            # Python objects (yaml.load without a Loader allows that).
            # An empty file yields None — fall back to an empty dict so
            # the membership tests below do not raise TypeError.
            base = yaml.safe_load(i.read()) or {}
    except IOError:
        LOG.info('Unable to load %s. Using defaults', path)
        base = {}
    conf = CONFIG_DEFAULTS
    for i in [j for j in CONFIG_DEFAULTS._fields if j not in CONFIG_IGNORE]:
        if i in base:
            conf = conf._replace(**{i: base[i]})
        if args is not None:
            # Explicit cmdline values win over the file.
            if i in args.__dict__ and args.__dict__[i] is not None:
                conf = conf._replace(**{i: args.__dict__[i]})
    conf = conf._replace(conn_instance=_create_connection(conf.conn))
    conf = conf._replace(cursor=conf.conn_instance.cursor())
    conf = conf._replace(callbacks=_get_callbacks(conf.callbacks,
                                                  conf.base_dir))
    return conf
def _main():
    '''
    Entry point: parse cmdline arguments, configure logging verbosity,
    build the config and dispatch the requested command.
    '''
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('cmd',
                            choices=COMMANDS.keys(),
                            type=str,
                            help='Operation')
    arg_parser.add_argument('-t', '--target',
                            type=int,
                            help='Target version')
    arg_parser.add_argument('-c', '--conn',
                            type=str,
                            help='Postgresql connection string')
    arg_parser.add_argument('-d', '--base_dir',
                            type=str,
                            default='',
                            help='Migrations base dir')
    arg_parser.add_argument('-b', '--baseline',
                            type=int,
                            help='Baseline version')
    arg_parser.add_argument('-a', '--callbacks',
                            type=str,
                            help='Comma-separated list of callbacks '
                                 '(type:dir/file)')
    arg_parser.add_argument('-n', '--dryrun',
                            action='store_true',
                            help='Say "rollback" in the end instead of "commit"')
    arg_parser.add_argument('-v', '--verbose',
                            default=0,
                            action='count',
                            help='Be verbose')
    parsed = arg_parser.parse_args()
    # Each -v lowers the threshold by one level; cap at DEBUG (-vvv).
    verbosity = min(3, parsed.verbose)
    logging.basicConfig(
        level=(logging.ERROR - 10 * verbosity),
        format='%(asctime)s %(levelname)-8s: %(message)s')
    config = get_config(parsed.base_dir, parsed)
    COMMANDS[parsed.cmd](config)
if __name__ == '__main__':
_main()

10
run_test.sh Executable file
View File

@ -0,0 +1,10 @@
#!/bin/sh
# Test harness entry point: start a local PostgreSQL 9.6 instance and run
# the pgmigrate test suite (tox) as the postgres user against /dist.
set -e
chown -R postgres:postgres /dist
# Fixed typo: log directory was spelled "pogsgresql" (renamed consistently
# here and in the pg_ctl -l path below; the directory is created by this
# script, so no external path depends on the old name).
mkdir -p /var/log/postgresql
chown postgres:postgres /var/log/postgresql
sudo -u postgres /usr/lib/postgresql/9.6/bin/pg_ctl -D /etc/postgresql/9.6/main -l /var/log/postgresql/postgresql-9.6-main.log start
cd /dist
sudo -u postgres -i tox -c /dist/tox.ini

61
setup.py Normal file
View File

@ -0,0 +1,61 @@
#!/usr/bin/env python
"""
setup.py for pgmigrate
"""
# encoding: utf-8
#
# Copyright (c) 2016 Yandex LLC <https://github.com/yandex>
# Copyright (c) 2016 Other contributors as noted in the AUTHORS file.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose, without fee, and without a written
# agreement is hereby granted, provided that the above copyright notice
# and this paragraph and the following two paragraphs appear in all copies.
#
# IN NO EVENT SHALL YANDEX LLC BE LIABLE TO ANY PARTY FOR DIRECT,
# INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING LOST
# PROFITS, ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION,
# EVEN IF YANDEX LLC HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# YANDEX SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS ON AN "AS IS"
# BASIS, AND YANDEX LLC HAS NO OBLIGATIONS TO PROVIDE MAINTENANCE,
# SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
import sys

try:
    from setuptools import setup
except ImportError:
    # Bug fix: ``setup`` is defined in distutils.core, not in the
    # distutils package itself, so ``from distutils import setup``
    # raised ImportError whenever setuptools was unavailable —
    # exactly the case this fallback exists for.
    from distutils.core import setup

# Runtime dependencies; versions match what the test matrix was run with.
REQUIREMENTS = [
    'sqlparse >= 0.2.1',
    'psycopg2 >= 2.6.2',
    'PyYAML >= 3.12'
]

# Python 2 additionally needs the ``future`` compatibility package.
if sys.version_info < (3, 0):
    REQUIREMENTS.append('future >= 0.15.2')

setup(
    name="yandex-pgmigrate",
    version="1.0.0",
    description="PostgreSQL migrations made easy",
    license="PostgreSQL License",
    url="https://github.com/yandex/pgmigrate/",
    author='Yandex LLC',
    author_email='opensource@yandex-team.ru',
    maintainer='Yandex LLC',
    maintainer_email='opensource@yandex-team.ru',
    zip_safe=False,
    platforms=["Linux", "BSD", "MacOS"],
    # NOTE(review): pgmigrate is a single module; ``py_modules=['pgmigrate']``
    # would be the conventional form — kept as-is to avoid changing the
    # installed layout in this pass.
    packages=['.'],
    entry_points={
        'console_scripts': [
            'pgmigrate = pgmigrate:_main',
        ]},
    install_requires=REQUIREMENTS,
)

47
tox.ini Normal file
View File

@ -0,0 +1,47 @@
# Tox (http://tox.testrun.org/) is a tool for running tests
# in multiple virtualenvs. This configuration file will run the
# test suite on all supported python versions. To use it, "pip install tox"
# and then run "tox" from this directory.
[tox]
# Test matrix: behave suites under Python 2.7 and 3.5, plus static checks.
envlist = py27, py35, flake8, pylint

[testenv:py27]
# rm runs outside the virtualenv to clear stale coverage HTML output.
whitelist_externals = rm
# Run the behave feature suite under coverage and require 100% line
# coverage of pgmigrate.py.
commands = rm -rf htmlcov
           coverage erase
           coverage run -p --include=pgmigrate.py {envbindir}/behave
           coverage combine
           coverage html pgmigrate.py
           coverage report --fail-under=100 pgmigrate.py
# importlib backport is only needed on Python 2.7.
deps = behave
       importlib
       coverage

[testenv:py35]
whitelist_externals = rm
commands = rm -rf htmlcov
           coverage erase
           coverage run -p --include=pgmigrate.py {envbindir}/behave
           coverage combine
           coverage html pgmigrate.py
           coverage report --fail-under=100 pgmigrate.py
deps = behave
       coverage

[testenv:flake8]
commands = flake8 pgmigrate.py
deps = flake8
       flake8-mock
       flake8-string-format
       flake8-isort
       flake8-copyright

[testenv:pylint]
commands = pylint pgmigrate.py
deps = pylint

# Configuration for the flake8-copyright plugin used above: every checked
# file must carry a 2016 Yandex LLC copyright notice.
[flake8]
copyright-check = True
copyright-regexp = Copyright\s+(\(C\)\s+)?(\d{4}-)?2016\s+%(author)s
copyright-author = Yandex LLC