Mirror of https://github.com/dbcli/pgcli.git (synced 2024-11-23 20:26:31 +03:00)
Port to psycopg3 (#1324)
* WIP.
* Add some comments about porting from psycopg 2 to 3 (#1318)
* WIP
* Disable _set_wait_callback()
* TransactionStatus.
* First working query.
* More pg3 changes.
* test_pgexecute still fails.
* Fix bytea support.
* Fix json and enum unicode.
* Get unit tests to pass.
* Behave tests still break, WIP.
* Prompt seems to be displayed fine, why don't the tests see the whitespace?
* Python version.
* Fix test.
* Black.
* Added black to dev reqs.
* nbu link for donations.
* Use psycopg.sql to format statement.
* Special case for show help in pgbouncer.
* Fix test.
* Added integration test.
* Install pgbouncer in ci.
* Fix integration test.
* Remove tmate session.
* Revert commenting out python versions.
* Pin pgspecial to >=2.
* Changelog.

Co-authored-by: Daniele Varrazzo <daniele.varrazzo@gmail.com>
Co-authored-by: Amjith Ramanujam <amjith.r@gmail.com>
This commit is contained in: parent 372da81ec4, commit 18071754bc
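For orientation, here is a rough cheat sheet of the psycopg2 calls this commit replaces and their psycopg 3 counterparts, reconstructed from the changes below. It is a sketch, not an exhaustive mapping, and the connection values are placeholders.

```python
# Sketch of the psycopg2 -> psycopg (3) API mapping used throughout this port.
from psycopg.conninfo import conninfo_to_dict, make_conninfo

# psycopg2.extensions.parse_dsn(dsn)       -> conninfo_to_dict(dsn)
# psycopg2.extensions.make_dsn(dsn, ...)   -> make_conninfo(dsn, ...)
# psycopg2.connect(**params)               -> psycopg.connect(make_conninfo(**params))
# conn.get_backend_pid()                   -> conn.info.backend_pid
# conn.get_parameter_status("x")           -> conn.info.parameter_status("x")
# conn.get_dsn_parameters()                -> conn.info.get_parameters()
# conn.get_transaction_status()            -> conn.info.transaction_status
# OperationalError, InterfaceError, etc.   -> same exception names under the psycopg package

dsn = make_conninfo("dbname=postgres", host="localhost", port=5432)  # placeholder values
print(conninfo_to_dict(dsn))
```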
.github/workflows/ci.yml (vendored, 35 changed lines)

@@ -11,7 +11,7 @@ jobs:
     strategy:
       matrix:
-        python-version: [3.6, 3.7, 3.8, 3.9]
+        python-version: ["3.7", "3.8", "3.9", "3.10"]

     services:
       postgres:
@@ -35,6 +35,35 @@ jobs:
         with:
           python-version: ${{ matrix.python-version }}

+      - name: Install pgbouncer
+        run: |
+          sudo apt install pgbouncer -y
+
+          sudo chmod 666 /etc/pgbouncer/*.*
+
+          cat <<EOF > /etc/pgbouncer/userlist.txt
+          "postgres" "postgres"
+          EOF
+
+          cat <<EOF > /etc/pgbouncer/pgbouncer.ini
+          [databases]
+          * = host=localhost port=5432
+          [pgbouncer]
+          listen_port = 6432
+          listen_addr = localhost
+          auth_type = trust
+          auth_file = /etc/pgbouncer/userlist.txt
+          logfile = pgbouncer.log
+          pidfile = pgbouncer.pid
+          admin_users = postgres
+          EOF
+
+          sudo systemctl stop pgbouncer
+
+          pgbouncer -d /etc/pgbouncer/pgbouncer.ini
+
+          psql -h localhost -U postgres -p 6432 pgbouncer -c 'show help'
+
       - name: Install requirements
         run: |
           pip install -U pip setuptools
@@ -56,8 +85,8 @@ jobs:
         run: rst2html.py --halt=warning changelog.rst >/dev/null

       - name: Run Black
-        run: pip install black && black --check .
-        if: matrix.python-version == '3.6'
+        run: black --check .
+        if: matrix.python-version == '3.7'

       - name: Coverage
         run: |
@@ -1,6 +1,5 @@
 repos:
     - repo: https://github.com/psf/black
-      rev: 21.5b0
+      rev: 22.3.0
       hooks:
           - id: black

README.rst (10 changed lines)

@@ -11,6 +11,7 @@ Picture by @fomenko_ph (Telegram).

 Please consider donating or volunteering.

+* https://bank.gov.ua/en/
 * https://savelife.in.ua/en/donate/
 * https://www.comebackalive.in.ua/donate
 * https://www.globalgiving.org/projects/ukraine-crisis-relief-fund/
@@ -51,10 +52,7 @@ If you already know how to install python packages, then you can simply do:
 If you don't know how to install python packages, please check the
 `detailed instructions`_.

-If you are restricted to using psycopg2 2.7.x then pip will try to install it from a binary. There are some known issues with the psycopg2 2.7 binary - see the `psycopg docs`_ for more information about this and how to force installation from source. psycopg2 2.8 has fixed these problems, and will build from source.
-
 .. _`detailed instructions`: https://github.com/dbcli/pgcli#detailed-installation-instructions
-.. _`psycopg docs`: http://initd.org/psycopg/docs/install.html#change-in-binary-packages-between-psycopg-2-7-and-2-8

 Usage
 -----
@@ -353,8 +351,8 @@ choice:

 In [3]: my_result = _

-Pgcli only runs on Python3.6+ since 2.2.0, if you use an old version of Python,
-you should use install ``pgcli <= 2.2.0``.
+Pgcli only runs on Python3.7+ since 4.0.0, if you use an old version of Python,
+you should use install ``pgcli <= 4.0.0``.

 Thanks:
 -------
@@ -368,7 +366,7 @@ of this app.
 `Click <http://click.pocoo.org/>`_ is used for command line option parsing
 and printing error messages.

-Thanks to `psycopg <http://initd.org/psycopg/>`_ for providing a rock solid
+Thanks to `psycopg <https://www.psycopg.org/>`_ for providing a rock solid
 interface to Postgres database.

 Thanks to all the beta testers and contributors for your time and patience. :)
@@ -1,3 +1,11 @@
+Upcoming:
+=========
+
+Internal:
+---------
+
+* Port to psycopg3 (https://github.com/psycopg/psycopg). Needs a major version bump.
+
 3.4.1 (2022/03/19)
 ==================

@@ -1,12 +1,7 @@
-import platform
-import warnings
-
 from configobj import ConfigObj, ParseError
 from pgspecial.namedqueries import NamedQueries
 from .config import skip_initial_comment

-warnings.filterwarnings("ignore", category=UserWarning, module="psycopg2")
-
 import atexit
 import os
 import re
@@ -22,7 +17,6 @@ import itertools
 import platform
 from time import time, sleep
 from typing import Optional
-from urllib.parse import urlparse

 keyring = None  # keyring will be loaded later

@@ -80,11 +74,9 @@ except ImportError:
 from urllib.parse import urlparse, unquote, parse_qs

 from getpass import getuser
-from psycopg2 import OperationalError, InterfaceError

-# pg3: https://www.psycopg.org/psycopg3/docs/api/conninfo.html
-from psycopg2.extensions import make_dsn, parse_dsn
-import psycopg2
+from psycopg import OperationalError, InterfaceError
+from psycopg.conninfo import make_conninfo, conninfo_to_dict

 from collections import namedtuple

@@ -537,7 +529,7 @@ class PGCli:
         )

     def connect_uri(self, uri):
-        kwargs = psycopg2.extensions.parse_dsn(uri)
+        kwargs = conninfo_to_dict(uri)
         remap = {"dbname": "database", "password": "passwd"}
         kwargs = {remap.get(k, k): v for k, v in kwargs.items()}
         self.connect(**kwargs)
@@ -585,7 +577,7 @@ class PGCli:
         if not passwd and keyring:

             try:
-                passwd = keyring.get_password("pgcli", key)
+                passwd = keyring.get_password("pgcli", key) or ""
             except (RuntimeError, keyring.errors.InitError) as e:
                 click.secho(
                     keyring_error_message.format(
@@ -608,7 +600,7 @@ class PGCli:
             return False

         if dsn:
-            parsed_dsn = parse_dsn(dsn)
+            parsed_dsn = conninfo_to_dict(dsn)
             if "host" in parsed_dsn:
                 host = parsed_dsn["host"]
             if "port" in parsed_dsn:
@@ -655,7 +647,7 @@ class PGCli:
             port = self.ssh_tunnel.local_bind_ports[0]

             if dsn:
-                dsn = make_dsn(dsn, host=host, port=port)
+                dsn = make_conninfo(dsn, host=host, port=port)

         # Attempt to connect to the database.
         # Note that passwd may be empty on the first attempt. If connection
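The connect path above swaps psycopg2's parse_dsn/make_dsn for the psycopg 3 conninfo helpers. A minimal sketch of how those helpers behave; the DSN values are placeholders, not pgcli defaults:

```python
from psycopg.conninfo import conninfo_to_dict, make_conninfo

# Parse a libpq connection string or URI into keyword arguments.
kwargs = conninfo_to_dict("postgresql://postgres@localhost:5432/pgcli_test")
# roughly {'user': 'postgres', 'host': 'localhost', 'port': '5432', 'dbname': 'pgcli_test'}

# Merge extra parameters into an existing conninfo string, as the SSH-tunnel
# code does when it overrides host and port.
dsn = make_conninfo("dbname=pgcli_test user=postgres", host="127.0.0.1", port=6432)
print(dsn)  # dbname=pgcli_test user=postgres host=127.0.0.1 port=6432
```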
@@ -1208,7 +1200,7 @@ class PGCli:


 @click.command()
-# Default host is '' so psycopg2 can default to either localhost or unix socket
+# Default host is '' so psycopg can default to either localhost or unix socket
 @click.option(
     "-h",
     "--host",
@@ -1606,18 +1598,11 @@ def format_output(title, cur, headers, status, settings, explain_mode=False):
     if hasattr(cur, "description"):
         column_types = []
         for d in cur.description:
-            # pg3: type_name = cur.adapters.types[d.type_code].name
-            if (
-                # pg3: type_name in ("numeric", "float4", "float8")
-                d[1] in psycopg2.extensions.DECIMAL.values
-                or d[1] in psycopg2.extensions.FLOAT.values
-            ):
+            col_type = cur.adapters.types.get(d.type_code)
+            type_name = col_type.name if col_type else None
+            if type_name in ("numeric", "float4", "float8"):
                 column_types.append(float)
-            if (
-                # pg3: type_name in ("int2", "int4", "int8")
-                d[1] == psycopg2.extensions.INTEGER.values
-                or d[1] in psycopg2.extensions.LONGINTEGER.values
-            ):
+            if type_name in ("int2", "int4", "int8"):
                 column_types.append(int)
             else:
                 column_types.append(str)
@@ -1634,7 +1619,11 @@ def format_output(title, cur, headers, status, settings, explain_mode=False):
         and headers
     ):
         formatted = formatter.format_output(
-            cur, headers, format_name="vertical", column_types=None, **output_kwargs
+            cur,
+            headers,
+            format_name="vertical",
+            column_types=column_types,
+            **output_kwargs,
         )
         if isinstance(formatted, str):
             formatted = iter(formatted.splitlines())
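format_output now classifies columns by looking the OID from cur.description up in the cursor's adapters map instead of comparing against psycopg2 typecaster objects. A standalone sketch of the same idea, mirroring the ported code; the connection string is a placeholder and needs a reachable database:

```python
import psycopg

# Placeholder DSN: point at any reachable database to try the sketch.
with psycopg.connect("dbname=postgres") as conn:
    with conn.cursor() as cur:
        cur.execute("SELECT 1::int4 AS n, 1.5::float8 AS x, 'a'::text AS t")
        for d in cur.description:
            info = cur.adapters.types.get(d.type_code)  # TypeInfo or None
            name = info.name if info else None
            if name in ("numeric", "float4", "float8"):
                py_type = float
            elif name in ("int2", "int4", "int8"):
                py_type = int
            else:
                py_type = str
            print(d.name, name, py_type)
```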
@@ -1,155 +1,45 @@
 import logging
-import select
 import traceback
+from collections import namedtuple

 import pgspecial as special
-import psycopg2
-import psycopg2.errorcodes
-import psycopg2.extensions as ext
-import psycopg2.extras
+import psycopg
+import psycopg.sql
+from psycopg.conninfo import make_conninfo
 import sqlparse
-from psycopg2.extensions import POLL_OK, POLL_READ, POLL_WRITE, make_dsn

 from .packages.parseutils.meta import FunctionMetadata, ForeignKey

 _logger = logging.getLogger(__name__)

-# Cast all database input to unicode automatically.
-# See http://initd.org/psycopg/docs/usage.html#unicode-handling for more info.
-# pg3: These should be automatic: unicode is the default
-ext.register_type(ext.UNICODE)
-ext.register_type(ext.UNICODEARRAY)
-ext.register_type(ext.new_type((705,), "UNKNOWN", ext.UNICODE))
-# See https://github.com/dbcli/pgcli/issues/426 for more details.
-# This registers a unicode type caster for datatype 'RECORD'.
-ext.register_type(ext.new_type((2249,), "RECORD", ext.UNICODE))
-
-# Cast bytea fields to text. By default, this will render as hex strings with
-# Postgres 9+ and as escaped binary in earlier versions.
-ext.register_type(ext.new_type((17,), "BYTEA_TEXT", psycopg2.STRING))
-
-# TODO: Get default timeout from pgclirc?
-_WAIT_SELECT_TIMEOUT = 1
-_wait_callback_is_set = False
-
-
-# pg3: it is already "green" but Ctrl-C breaks the query
-# pg3: This should be fixed upstream: https://github.com/psycopg/psycopg/issues/231
-def _wait_select(conn):
-    """
-    copy-pasted from psycopg2.extras.wait_select
-    the default implementation doesn't define a timeout in the select calls
-    """
-    try:
-        while 1:
-            try:
-                state = conn.poll()
-                if state == POLL_OK:
-                    break
-                elif state == POLL_READ:
-                    select.select([conn.fileno()], [], [], _WAIT_SELECT_TIMEOUT)
-                elif state == POLL_WRITE:
-                    select.select([], [conn.fileno()], [], _WAIT_SELECT_TIMEOUT)
-                else:
-                    raise conn.OperationalError("bad state from poll: %s" % state)
-            except KeyboardInterrupt:
-                conn.cancel()
-                # the loop will be broken by a server error
-                continue
-            except OSError as e:
-                errno = e.args[0]
-                if errno != 4:
-                    raise
-    except psycopg2.OperationalError:
-        pass
-
-
-def _set_wait_callback(is_virtual_database):
-    global _wait_callback_is_set
-    if _wait_callback_is_set:
-        return
-    _wait_callback_is_set = True
-    if is_virtual_database:
-        return
-    # When running a query, make pressing CTRL+C raise a KeyboardInterrupt
-    # See http://initd.org/psycopg/articles/2014/07/20/cancelling-postgresql-statements-python/
-    # See also https://github.com/psycopg/psycopg2/issues/468
-    ext.set_wait_callback(_wait_select)
-
-
-# pg3: You can do something like:
-# pg3: cnn.adapters.register_loader("date", psycopg.types.string.TextLoader)
-def register_date_typecasters(connection):
-    """
-    Casts date and timestamp values to string, resolves issues with out of
-    range dates (e.g. BC) which psycopg2 can't handle
-    """
-
-    def cast_date(value, cursor):
-        return value
-
-    cursor = connection.cursor()
-    cursor.execute("SELECT NULL::date")
-    if cursor.description is None:
-        return
-    date_oid = cursor.description[0][1]
-    cursor.execute("SELECT NULL::timestamp")
-    timestamp_oid = cursor.description[0][1]
-    cursor.execute("SELECT NULL::timestamp with time zone")
-    timestamptz_oid = cursor.description[0][1]
-    oids = (date_oid, timestamp_oid, timestamptz_oid)
-    new_type = psycopg2.extensions.new_type(oids, "DATE", cast_date)
-    psycopg2.extensions.register_type(new_type)
-
-
-def register_json_typecasters(conn, loads_fn):
-    """Set the function for converting JSON data for a connection.
-
-    Use the supplied function to decode JSON data returned from the database
-    via the given connection. The function should accept a single argument of
-    the data as a string encoded in the database's character encoding.
-    psycopg2's default handler for JSON data is json.loads.
-    http://initd.org/psycopg/docs/extras.html#json-adaptation
-
-    This function attempts to register the typecaster for both JSON and JSONB
-    types.
-
-    Returns a set that is a subset of {'json', 'jsonb'} indicating which types
-    (if any) were successfully registered.
-    """
-    available = set()
-
-    for name in ["json", "jsonb"]:
-        try:
-            psycopg2.extras.register_json(conn, loads=loads_fn, name=name)
-            available.add(name)
-        except (psycopg2.ProgrammingError, psycopg2.errors.ProtocolViolation):
-            pass
-
-    return available
-
-
-# pg3: Probably you don't need this because by default unknown -> unicode
-def register_hstore_typecaster(conn):
-    """
-    Instead of using register_hstore() which converts hstore into a python
-    dict, we query the 'oid' of hstore which will be different for each
-    database and register a type caster that converts it to unicode.
-    http://initd.org/psycopg/docs/extras.html#psycopg2.extras.register_hstore
-    """
-    with conn.cursor() as cur:
-        try:
-            cur.execute(
-                "select t.oid FROM pg_type t WHERE t.typname = 'hstore' and t.typisdefined"
-            )
-            oid = cur.fetchone()[0]
-            ext.register_type(ext.new_type((oid,), "HSTORE", ext.UNICODE))
-        except Exception:
-            pass
+ViewDef = namedtuple(
+    "ViewDef", "nspname relname relkind viewdef reloptions checkoption"
+)
+
+
+def register_typecasters(connection):
+    """Casts date and timestamp values to string, resolves issues with out-of-range
+    dates (e.g. BC) which psycopg can't handle"""
+    for forced_text_type in [
+        "date",
+        "time",
+        "timestamp",
+        "timestamptz",
+        "bytea",
+        "json",
+        "jsonb",
+    ]:
+        connection.adapters.register_loader(
+            forced_text_type, psycopg.types.string.TextLoader
+        )


 # pg3: I don't know what is this
-class ProtocolSafeCursor(psycopg2.extensions.cursor):
+class ProtocolSafeCursor(psycopg.Cursor):
+    """This class wraps and suppresses Protocol Errors with pgbouncer database.
+    See https://github.com/dbcli/pgcli/pull/1097.
+    Pgbouncer database is a virtual database with its own set of commands."""
+
     def __init__(self, *args, **kwargs):
         self.protocol_error = False
         self.protocol_message = ""
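The new register_typecasters() replaces the old psycopg2 register_type/register_json/register_hstore machinery: it registers psycopg's TextLoader per type name so those values come back as unparsed strings. A minimal standalone sketch, assuming a reachable database; the DSN is a placeholder:

```python
import psycopg
from psycopg.types.string import TextLoader


def register_text_loaders(conn: psycopg.Connection) -> None:
    # Force these types to load as plain text, mirroring register_typecasters()
    # above (e.g. so out-of-range BC dates do not blow up on load).
    for type_name in ("date", "time", "timestamp", "timestamptz", "bytea", "json", "jsonb"):
        conn.adapters.register_loader(type_name, TextLoader)


with psycopg.connect("dbname=postgres") as conn:  # placeholder DSN
    register_text_loaders(conn)
    value = conn.execute("SELECT now()::timestamptz").fetchone()[0]
    print(type(value), value)  # <class 'str'> ...
```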
@@ -170,14 +60,18 @@ class ProtocolSafeCursor(psycopg2.extensions.cursor):
             return (self.protocol_message,)
         return super().fetchone()

-    def execute(self, sql, args=None):
+    # def mogrify(self, query, params):
+    #     args = [Literal(v).as_string(self.connection) for v in params]
+    #     return query % tuple(args)
+    #
+    def execute(self, *args, **kwargs):
         try:
-            psycopg2.extensions.cursor.execute(self, sql, args)
+            super().execute(*args, **kwargs)
             self.protocol_error = False
             self.protocol_message = ""
-        except psycopg2.errors.ProtocolViolation as ex:
+        except psycopg.errors.ProtocolViolation as ex:
             self.protocol_error = True
-            self.protocol_message = ex.pgerror
+            self.protocol_message = str(ex)
             _logger.debug("%s: %s" % (ex.__class__.__name__, ex))


@@ -290,7 +184,7 @@ class PGExecute:
         conn_params = self._conn_params.copy()

         new_params = {
-            "database": database,
+            "dbname": database,
             "user": user,
             "password": password,
             "host": host,
@@ -303,15 +197,15 @@ class PGExecute:
             new_params = {"dsn": new_params["dsn"], "password": new_params["password"]}

             if new_params["password"]:
-                new_params["dsn"] = make_dsn(
+                new_params["dsn"] = make_conninfo(
                     new_params["dsn"], password=new_params.pop("password")
                 )

         conn_params.update({k: v for k, v in new_params.items() if v})
-        conn_params["cursor_factory"] = ProtocolSafeCursor

-        conn = psycopg2.connect(**conn_params)
-        conn.set_client_encoding("utf8")
+        conn_info = make_conninfo(**conn_params)
+        conn = psycopg.connect(conn_info)
+        conn.cursor_factory = ProtocolSafeCursor

         self._conn_params = conn_params
         if self.conn:
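The connect path now builds a conninfo string with make_conninfo, connects with psycopg.connect, and installs the custom cursor class by assigning conn.cursor_factory instead of passing cursor_factory to connect(). A hedged sketch; QuietCursor is a toy stand-in for ProtocolSafeCursor and the connection parameters are placeholders:

```python
import psycopg
from psycopg.conninfo import make_conninfo


class QuietCursor(psycopg.Cursor):
    """Toy stand-in for ProtocolSafeCursor: swallow ProtocolViolation errors."""

    def execute(self, *args, **kwargs):
        try:
            return super().execute(*args, **kwargs)
        except psycopg.errors.ProtocolViolation as ex:
            self.protocol_message = str(ex)  # expose the error instead of raising
            return self


conninfo = make_conninfo(dbname="postgres", host="localhost")  # placeholder params
conn = psycopg.connect(conninfo)
conn.cursor_factory = QuietCursor
print(conn.info.get_parameters())  # what conn.get_dsn_parameters() used to return
conn.close()
```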
@@ -322,19 +216,7 @@ class PGExecute:
         # When we connect using a DSN, we don't really know what db,
         # user, etc. we connected to. Let's read it.
         # Note: moved this after setting autocommit because of #664.
-        libpq_version = psycopg2.__libpq_version__
-        dsn_parameters = {}
-        if libpq_version >= 93000:
-            # use actual connection info from psycopg2.extensions.Connection.info
-            # as libpq_version > 9.3 is available and required dependency
-            dsn_parameters = conn.info.dsn_parameters
-        else:
-            try:
-                dsn_parameters = conn.get_dsn_parameters()
-            except Exception as x:
-                # https://github.com/dbcli/pgcli/issues/1110
-                # PQconninfo not available in libpq < 9.3
-                _logger.info("Exception in get_dsn_parameters: %r", x)
+        dsn_parameters = conn.info.get_parameters()

         if dsn_parameters:
             self.dbname = dsn_parameters.get("dbname")
@@ -357,16 +239,14 @@ class PGExecute:
             else self.get_socket_directory()
         )

-        self.pid = conn.get_backend_pid()
-        self.superuser = conn.get_parameter_status("is_superuser") in ("on", "1")
-        self.server_version = conn.get_parameter_status("server_version") or ""
+        self.pid = conn.info.backend_pid
+        self.superuser = conn.info.parameter_status("is_superuser") in ("on", "1")
+        self.server_version = conn.info.parameter_status("server_version") or ""

-        _set_wait_callback(self.is_virtual_database())
+        # _set_wait_callback(self.is_virtual_database())

         if not self.is_virtual_database():
-            register_date_typecasters(conn)
-            register_json_typecasters(self.conn, self._json_typecaster)
-            register_hstore_typecaster(self.conn)
+            register_typecasters(conn)

     @property
     def short_host(self):
@@ -387,27 +267,14 @@ class PGExecute:
             cur.execute(sql)
             return cur.fetchone()

-    def _json_typecaster(self, json_data):
-        """Interpret incoming JSON data as a string.
-
-        The raw data is decoded using the connection's encoding, which defaults
-        to the database's encoding.
-
-        See http://initd.org/psycopg/docs/connection.html#connection.encoding
-        """
-
-        return json_data
-
     def failed_transaction(self):
-        # pg3: self.conn.info.transaction_status == psycopg.pq.TransactionStatus.INERROR
-        status = self.conn.get_transaction_status()
-        return status == ext.TRANSACTION_STATUS_INERROR
+        return self.conn.info.transaction_status == psycopg.pq.TransactionStatus.INERROR

     def valid_transaction(self):
-        status = self.conn.get_transaction_status()
+        status = self.conn.info.transaction_status
         return (
-            status == ext.TRANSACTION_STATUS_ACTIVE
-            or status == ext.TRANSACTION_STATUS_INTRANS
+            status == psycopg.pq.TransactionStatus.ACTIVE
+            or status == psycopg.pq.TransactionStatus.INTRANS
         )

     def run(
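failed_transaction/valid_transaction now read conn.info.transaction_status and compare against the psycopg.pq.TransactionStatus enum instead of psycopg2's ext.TRANSACTION_STATUS_* constants. A small sketch of the enum in action; the DSN is a placeholder:

```python
import psycopg
from psycopg.pq import TransactionStatus

with psycopg.connect("dbname=postgres") as conn:  # placeholder DSN, autocommit off
    print(conn.info.transaction_status == TransactionStatus.IDLE)     # True, nothing run yet
    conn.execute("SELECT 1")
    print(conn.info.transaction_status == TransactionStatus.INTRANS)  # True, inside a transaction
    try:
        conn.execute("SELECT broken syntax")
    except psycopg.errors.SyntaxError:
        pass
    print(conn.info.transaction_status == TransactionStatus.INERROR)  # True, failed transaction
```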
@@ -437,7 +304,7 @@ class PGExecute:
         # Remove spaces and EOL
         statement = statement.strip()
         if not statement:  # Empty string
-            yield (None, None, None, None, statement, False, False)
+            yield None, None, None, None, statement, False, False

         # Split the sql into separate queries and run each one.
         for sql in sqlparse.split(statement):
@@ -461,7 +328,7 @@ class PGExecute:
                 _logger.debug("Trying a pgspecial command. sql: %r", sql)
                 try:
                     cur = self.conn.cursor()
-                except psycopg2.InterfaceError:
+                except psycopg.InterfaceError:
                     # edge case when connection is already closed, but we
                     # don't need cursor for special_cmd.arg_type == NO_QUERY.
                     # See https://github.com/dbcli/pgcli/issues/1014.
@@ -485,7 +352,7 @@ class PGExecute:

                 # Not a special command, so execute as normal sql
                 yield self.execute_normal_sql(sql) + (sql, True, False)
-            except psycopg2.DatabaseError as e:
+            except psycopg.DatabaseError as e:
                 _logger.error("sql: %r, error: %r", sql, e)
                 _logger.error("traceback: %r", traceback.format_exc())

@@ -518,14 +385,24 @@ class PGExecute:
     def execute_normal_sql(self, split_sql):
         """Returns tuple (title, rows, headers, status)"""
         _logger.debug("Regular sql statement. sql: %r", split_sql)

+        title = ""
+
+        def handle_notices(n):
+            nonlocal title
+            title = f"{n.message_primary}\n{n.message_detail}\n{title}"
+
+        self.conn.add_notice_handler(handle_notices)
+
+        if self.is_virtual_database() and "show help" in split_sql.lower():
+            # see https://github.com/psycopg/psycopg/issues/303
+            # special case "show help" in pgbouncer
+            res = self.conn.pgconn.exec_(split_sql.encode())
+            return title, None, None, res.command_status.decode()
+
         cur = self.conn.cursor()
         cur.execute(split_sql)

-        # conn.notices persist between queies, we use pop to clear out the list
-        title = ""
-        while len(self.conn.notices) > 0:
-            title = self.conn.notices.pop() + title
-
         # cur.description will be None for operations that do not return
         # rows.
         if cur.description:
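psycopg 3 has no conn.notices list, so execute_normal_sql now registers a notice handler that receives a Diagnostic object with the message parts pgcli stitches into the title. A minimal sketch; the DSN is a placeholder:

```python
import psycopg

notices = []


def on_notice(diag: psycopg.errors.Diagnostic) -> None:
    # diag carries the fields the handler above uses (message_primary, message_detail, ...).
    notices.append(f"{diag.severity}: {diag.message_primary}")


with psycopg.connect("dbname=postgres", autocommit=True) as conn:  # placeholder DSN
    conn.add_notice_handler(on_notice)
    conn.execute("DO $$ BEGIN RAISE NOTICE 'hello from the server'; END $$")

print(notices)  # ['NOTICE: hello from the server']
```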
@@ -546,7 +423,7 @@ class PGExecute:
                 _logger.debug("Search path query. sql: %r", self.search_path_query)
                 cur.execute(self.search_path_query)
                 return [x[0] for x in cur.fetchall()]
-        except psycopg2.ProgrammingError:
+        except psycopg.ProgrammingError:
             fallback = "SELECT * FROM current_schemas(true)"
             with self.conn.cursor() as cur:
                 _logger.debug("Search path query. sql: %r", fallback)
@@ -556,9 +433,6 @@ class PGExecute:
     def view_definition(self, spec):
         """Returns the SQL defining views described by `spec`"""

-        # pg3: you may want to use `psycopg.sql` for client-side composition
-        # pg3: (also available in psycopg2 by the way)
-        template = "CREATE OR REPLACE {6} VIEW {0}.{1} AS \n{3}"
         # 2: relkind, v or m (materialized)
         # 4: reloptions, null
         # 5: checkoption: local or cascaded
@@ -567,11 +441,21 @@ class PGExecute:
             _logger.debug("View Definition Query. sql: %r\nspec: %r", sql, spec)
             try:
                 cur.execute(sql, (spec,))
-            except psycopg2.ProgrammingError:
+            except psycopg.ProgrammingError:
                 raise RuntimeError(f"View {spec} does not exist.")
-            result = cur.fetchone()
-            view_type = "MATERIALIZED" if result[2] == "m" else ""
-            return template.format(*result + (view_type,))
+            result = ViewDef(*cur.fetchone())
+            if result.relkind == "m":
+                template = "CREATE OR REPLACE MATERIALIZED VIEW {name} AS \n{stmt}"
+            else:
+                template = "CREATE OR REPLACE VIEW {name} AS \n{stmt}"
+            return (
+                psycopg.sql.SQL(template)
+                .format(
+                    name=psycopg.sql.Identifier(f"{result.nspname}.{result.relname}"),
+                    stmt=psycopg.sql.SQL(result.viewdef),
+                )
+                .as_string(self.conn)
+            )

     def function_definition(self, spec):
         """Returns the SQL defining functions described by `spec`"""
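view_definition now composes the CREATE statement client-side with psycopg.sql instead of %-formatting a template string. A sketch of that composition with hypothetical view parts standing in for the ViewDef row fetched above; the DSN is a placeholder (as_string needs a connection to know the server encoding):

```python
import psycopg
from psycopg import sql

# Hypothetical values standing in for nspname, relname and viewdef.
nspname, relname, viewdef = "public", "user_emails", "SELECT id, email FROM users"

stmt = sql.SQL("CREATE OR REPLACE VIEW {name} AS \n{stmt}").format(
    name=sql.Identifier(f"{nspname}.{relname}"),
    stmt=sql.SQL(viewdef),
)

with psycopg.connect("dbname=postgres") as conn:  # placeholder DSN
    print(stmt.as_string(conn))
```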
@@ -583,7 +467,7 @@ class PGExecute:
                 cur.execute(sql, (spec,))
                 result = cur.fetchone()
                 return result[0]
-            except psycopg2.ProgrammingError:
+            except psycopg.ProgrammingError:
                 raise RuntimeError(f"Function {spec} does not exist.")

     def schemata(self):
@@ -607,9 +491,9 @@ class PGExecute:
         """

         with self.conn.cursor() as cur:
-            sql = cur.mogrify(self.tables_query, [kinds])
-            _logger.debug("Tables Query. sql: %r", sql)
-            cur.execute(sql)
+            # sql = cur.mogrify(self.tables_query, kinds)
+            # _logger.debug("Tables Query. sql: %r", sql)
+            cur.execute(self.tables_query, [kinds])
             yield from cur

     def tables(self):
@@ -635,7 +519,7 @@ class PGExecute:
         :return: list of (schema_name, relation_name, column_name, column_type) tuples
         """

-        if self.conn.server_version >= 80400:
+        if self.conn.info.server_version >= 80400:
             columns_query = """
                 SELECT nsp.nspname schema_name,
                        cls.relname table_name,
@@ -676,9 +560,9 @@ class PGExecute:
                 ORDER BY 1, 2, att.attnum"""

         with self.conn.cursor() as cur:
-            sql = cur.mogrify(columns_query, [kinds])
-            _logger.debug("Columns Query. sql: %r", sql)
-            cur.execute(sql)
+            # sql = cur.mogrify(columns_query, kinds)
+            # _logger.debug("Columns Query. sql: %r", sql)
+            cur.execute(columns_query, [kinds])
             yield from cur

     def table_columns(self):
@@ -719,7 +603,7 @@ class PGExecute:
     def foreignkeys(self):
         """Yields ForeignKey named tuples"""

-        if self.conn.server_version < 90000:
+        if self.conn.info.server_version < 90000:
             return

         with self.conn.cursor() as cur:
@@ -759,7 +643,7 @@ class PGExecute:
     def functions(self):
         """Yields FunctionMetadata named tuples"""

-        if self.conn.server_version >= 110000:
+        if self.conn.info.server_version >= 110000:
             query = """
                 SELECT n.nspname schema_name,
                        p.proname func_name,
@@ -779,7 +663,7 @@ class PGExecute:
                 WHERE p.prorettype::regtype != 'trigger'::regtype
                 ORDER BY 1, 2
                 """
-        elif self.conn.server_version > 90000:
+        elif self.conn.info.server_version > 90000:
             query = """
                 SELECT n.nspname schema_name,
                        p.proname func_name,
@@ -799,7 +683,7 @@ class PGExecute:
                 WHERE p.prorettype::regtype != 'trigger'::regtype
                 ORDER BY 1, 2
                 """
-        elif self.conn.server_version >= 80400:
+        elif self.conn.info.server_version >= 80400:
             query = """
                 SELECT n.nspname schema_name,
                        p.proname func_name,
@@ -850,7 +734,7 @@ class PGExecute:
         """Yields tuples of (schema_name, type_name)"""

         with self.conn.cursor() as cur:
-            if self.conn.server_version > 90000:
+            if self.conn.info.server_version > 90000:
                 query = """
                     SELECT n.nspname schema_name,
                            t.typname type_name
@@ -1,13 +1,12 @@
 pytest>=2.7.0
 tox>=1.9.2
 behave>=1.2.4
+black>=22.3.0
 pexpect==3.3
 pre-commit>=1.16.0
-coverage==5.0.4
+coverage>=5.0.4
 codecov>=1.5.1
 docutils>=0.13.1
-autopep8==1.3.3
-click==6.7
-twine==1.11.0
-wheel==0.33.6
-prompt_toolkit==3.0.5
+autopep8>=1.3.3
+twine>=1.11.0
+wheel>=0.33.6

setup.py (14 changed lines)

@@ -6,13 +6,13 @@ from pgcli import __version__
 description = "CLI for Postgres Database. With auto-completion and syntax highlighting."

 install_requirements = [
-    "pgspecial>=1.13.1,<2.0.0",
+    "pgspecial>=2.0.0",
     "click >= 4.1",
     "Pygments>=2.0",  # Pygments has to be Capitalcased. WTF?
     # We still need to use pt-2 unless pt-3 released on Fedora32
     # see: https://github.com/dbcli/pgcli/pull/1197
     "prompt_toolkit>=2.0.6,<4.0.0",
-    "psycopg2 >= 2.8",
+    "psycopg >= 3.0.14",
     "sqlparse >=0.3.0,<0.5",
     "configobj >= 5.0.6",
     "pendulum>=2.1.0",
@@ -34,19 +34,19 @@ setup(
     version=__version__,
     license="BSD",
     url="http://pgcli.com",
-    project_urls={
-        "Source": "https://github.com/dbcli/pgcli",
-    },
     packages=find_packages(),
     package_data={"pgcli": ["pgclirc", "packages/pgliterals/pgliterals.json"]},
     description=description,
     long_description=open("README.rst").read(),
     install_requires=install_requirements,
+    dependency_links=[
+        "http://github.com/psycopg/repo/tarball/master#egg=psycopg-3.0.10"
+    ],
     extras_require={
         "keyring": ["keyring >= 12.2.0"],
         "sshtunnel": ["sshtunnel >= 0.4.0"],
     },
-    python_requires=">=3.6",
+    python_requires=">=3.7",
     entry_points="""
         [console_scripts]
         pgcli=pgcli.main:cli
@@ -57,10 +57,10 @@ setup(
         "Operating System :: Unix",
         "Programming Language :: Python",
         "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.6",
         "Programming Language :: Python :: 3.7",
         "Programming Language :: Python :: 3.8",
         "Programming Language :: Python :: 3.9",
+        "Programming Language :: Python :: 3.10",
         "Programming Language :: SQL",
         "Topic :: Database",
         "Topic :: Database :: Front-Ends",
@@ -49,7 +49,6 @@ Feature: run the cli,
     when we send "\?" command
     then we see help output

-  @wip
   Scenario: run the cli with dsn and password
     When we launch dbcli using dsn_password
     then we send password
@@ -1,5 +1,4 @@
-from psycopg2 import connect
-from psycopg2.extensions import AsIs
+from psycopg import connect


 def create_db(
@@ -17,13 +16,10 @@ def create_db(
     """
     cn = create_cn(hostname, password, username, "postgres", port)

-    # ISOLATION_LEVEL_AUTOCOMMIT = 0
-    # Needed for DB creation.
-    cn.set_isolation_level(0)
+    cn.autocommit = True

     with cn.cursor() as cr:
-        cr.execute("drop database if exists %s", (AsIs(dbname),))
-        cr.execute("create database %s", (AsIs(dbname),))
+        cr.execute(f"drop database if exists {dbname}")
+        cr.execute(f"create database {dbname}")

     cn.close()

@@ -41,13 +37,26 @@ def create_cn(hostname, password, username, dbname, port):
     :return: psycopg2.connection
     """
     cn = connect(
-        host=hostname, user=username, database=dbname, password=password, port=port
+        host=hostname, user=username, dbname=dbname, password=password, port=port
     )

-    print(f"Created connection: {cn.dsn}.")
+    print(f"Created connection: {cn.info.get_parameters()}.")
     return cn


+def pgbouncer_available(hostname="localhost", password=None, username="postgres"):
+    cn = None
+    try:
+        cn = create_cn(hostname, password, username, "pgbouncer", 6432)
+        return True
+    except:
+        print("Pgbouncer is not available.")
+    finally:
+        if cn:
+            cn.close()
+    return False
+
+
 def drop_db(hostname="localhost", username=None, password=None, dbname=None, port=None):
     """
     Drop database.
@@ -58,12 +67,11 @@ def drop_db(hostname="localhost", username=None, password=None, dbname=None, por
     """
     cn = create_cn(hostname, password, username, "postgres", port)

-    # ISOLATION_LEVEL_AUTOCOMMIT = 0
     # Needed for DB drop.
-    cn.set_isolation_level(0)
+    cn.autocommit = True

     with cn.cursor() as cr:
-        cr.execute("drop database if exists %s", (AsIs(dbname),))
+        cr.execute(f"drop database if exists {dbname}")

     close_cn(cn)

@@ -74,5 +82,6 @@ def close_cn(cn=None):
     :param connection: psycopg2.connection
     """
     if cn:
+        cn_params = cn.info.get_parameters()
         cn.close()
-        print(f"Closed connection: {cn.dsn}.")
+        print(f"Closed connection: {cn_params}.")
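The behave helpers above switch from psycopg2's set_isolation_level(0) to the connection's autocommit attribute, since CREATE DATABASE and DROP DATABASE cannot run inside a transaction block. A minimal sketch with placeholder credentials and a hypothetical database name:

```python
import psycopg

# Placeholder connection parameters for a local test server.
with psycopg.connect(dbname="postgres", host="localhost", user="postgres") as cn:
    cn.autocommit = True  # CREATE/DROP DATABASE refuse to run inside a transaction
    with cn.cursor() as cr:
        cr.execute("DROP DATABASE IF EXISTS pgcli_behave_tmp")  # hypothetical name
        cr.execute("CREATE DATABASE pgcli_behave_tmp")
```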
@@ -111,7 +111,11 @@ def before_all(context):
         context.conf["dbname"],
         context.conf["port"],
     )
+    context.pgbouncer_available = dbutils.pgbouncer_available(
+        hostname=context.conf["host"],
+        password=context.conf["pass"],
+        username=context.conf["user"],
+    )
     context.fixture_data = fixutils.read_fixture_files()

     # use temporary directory as config home
@@ -164,7 +168,19 @@ def before_scenario(context, scenario):
     if scenario.name == "list databases":
         # not using the cli for that
         return
-    wrappers.run_cli(context)
+    currentdb = None
+    if "pgbouncer" in scenario.feature.tags:
+        if context.pgbouncer_available:
+            os.environ["PGDATABASE"] = "pgbouncer"
+            os.environ["PGPORT"] = "6432"
+            currentdb = "pgbouncer"
+        else:
+            scenario.skip()
+    else:
+        # set env vars back to normal test database
+        os.environ["PGDATABASE"] = context.conf["dbname"]
+        os.environ["PGPORT"] = context.conf["port"]
+    wrappers.run_cli(context, currentdb=currentdb)
     wrappers.wait_prompt(context)


@@ -172,13 +188,17 @@ def after_scenario(context, scenario):
     """Cleans up after each scenario completes."""
     if hasattr(context, "cli") and context.cli and not context.exit_sent:
         # Quit nicely.
-        if not context.atprompt:
+        if not getattr(context, "atprompt", False):
             dbname = context.currentdb
-            context.cli.expect_exact(f"{dbname}> ", timeout=15)
-        context.cli.sendcontrol("c")
-        context.cli.sendcontrol("d")
+            context.cli.expect_exact(f"{dbname}>", timeout=5)
         try:
-            context.cli.expect_exact(pexpect.EOF, timeout=15)
+            context.cli.sendcontrol("c")
+            context.cli.sendcontrol("d")
+        except Exception as x:
+            print("Failed cleanup after scenario:")
+            print(x)
+        try:
+            context.cli.expect_exact(pexpect.EOF, timeout=5)
         except pexpect.TIMEOUT:
             print(f"--- after_scenario {scenario.name}: kill cli")
             context.cli.kill(signal.SIGKILL)
tests/features/pgbouncer.feature (new file, 12 lines)

@@ -0,0 +1,12 @@
+@pgbouncer
+Feature: run pgbouncer,
+  call the help command,
+  exit the cli
+
+  Scenario: run "show help" command
+     When we send "show help" command
+     then we see the pgbouncer help output
+
+  Scenario: run the cli and exit
+     When we send "ctrl + d"
+     then dbcli exits
|
|||||||
context.cli.sendline(r"\pset pager off")
|
context.cli.sendline(r"\pset pager off")
|
||||||
wrappers.wait_prompt(context)
|
wrappers.wait_prompt(context)
|
||||||
context.cli.sendcontrol("d")
|
context.cli.sendcontrol("d")
|
||||||
context.cli.expect(pexpect.EOF, timeout=15)
|
context.cli.expect(pexpect.EOF, timeout=5)
|
||||||
context.exit_sent = True
|
context.exit_sent = True
|
||||||
|
|
||||||
|
|
||||||
|
@ -59,7 +59,7 @@ def step_see_prompt(context):
|
|||||||
Wait to see the prompt.
|
Wait to see the prompt.
|
||||||
"""
|
"""
|
||||||
db_name = getattr(context, "currentdb", context.conf["dbname"])
|
db_name = getattr(context, "currentdb", context.conf["dbname"])
|
||||||
wrappers.expect_exact(context, f"{db_name}> ", timeout=5)
|
wrappers.expect_exact(context, f"{db_name}>", timeout=5)
|
||||||
context.atprompt = True
|
context.atprompt = True
|
||||||
|
|
||||||
|
|
||||||
|
tests/features/steps/pgbouncer.py (new file, 22 lines)

@@ -0,0 +1,22 @@
+"""
+Steps for behavioral style tests are defined in this module.
+Each step is defined by the string decorating it.
+This string is used to call the step in "*.feature" file.
+"""
+
+from behave import when, then
+import wrappers
+
+
+@when('we send "show help" command')
+def step_send_help_command(context):
+    context.cli.sendline("show help")
+
+
+@then("we see the pgbouncer help output")
+def see_pgbouncer_help(context):
+    wrappers.expect_exact(
+        context,
+        "SHOW HELP|CONFIG|DATABASES|POOLS|CLIENTS|SERVERS|USERS|VERSION",
+        timeout=3,
+    )
@@ -70,4 +70,5 @@ def run_cli(context, run_args=None, prompt_check=True, currentdb=None):

 def wait_prompt(context):
     """Make sure prompt is displayed."""
-    expect_exact(context, "{0}> ".format(context.conf["dbname"]), timeout=5)
+    prompt_str = "{0}>".format(context.currentdb)
+    expect_exact(context, [prompt_str + " ", prompt_str, pexpect.EOF], timeout=3)
@@ -1,6 +1,6 @@
 from textwrap import dedent

-import psycopg2
+import psycopg
 import pytest
 from unittest.mock import patch, MagicMock
 from pgspecial.main import PGSpecial, NO_QUERY
@@ -428,7 +428,7 @@ def test_describe_special(executor, command, verbose, pattern, pgspecial):
 @dbtest
 @pytest.mark.parametrize("sql", ["invalid sql", "SELECT 1; select error;"])
 def test_raises_with_no_formatter(executor, sql):
-    with pytest.raises(psycopg2.ProgrammingError):
+    with pytest.raises(psycopg.ProgrammingError):
         list(executor.run(sql))


@@ -513,13 +513,6 @@ def test_short_host(executor):
     assert executor.short_host == "localhost1"


-class BrokenConnection:
-    """Mock a connection that failed."""
-
-    def cursor(self):
-        raise psycopg2.InterfaceError("I'm broken!")
-
-
 class VirtualCursor:
     """Mock a cursor to virtual database like pgbouncer."""

@@ -549,13 +542,15 @@ def test_exit_without_active_connection(executor):
         aliases=(":q",),
     )

-    with patch.object(executor, "conn", BrokenConnection()):
+    with patch.object(
+        executor.conn, "cursor", side_effect=psycopg.InterfaceError("I'm broken!")
+    ):
         # we should be able to quit the app, even without active connection
         run(executor, "\\q", pgspecial=pgspecial)
         quit_handler.assert_called_once()

     # an exception should be raised when running a query without active connection
-    with pytest.raises(psycopg2.InterfaceError):
+    with pytest.raises(psycopg.InterfaceError):
         run(executor, "select 1", pgspecial=pgspecial)

@@ -1,38 +0,0 @@
-= Gross Checks =
-* [ ] Check connecting to a local database.
-* [ ] Check connecting to a remote database.
-* [ ] Check connecting to a database with a user/password.
-* [ ] Check connecting to a non-existent database.
-* [ ] Test changing the database.
-
-== PGExecute ==
-* [ ] Test successful execution given a cursor.
-* [ ] Test unsuccessful execution with a syntax error.
-* [ ] Test a series of executions with the same cursor without failure.
-* [ ] Test a series of executions with the same cursor with failure.
-* [ ] Test passing in a special command.
-
-== Naive Autocompletion ==
-* [ ] Input empty string, ask for completions - Everything.
-* [ ] Input partial prefix, ask for completions - Stars with prefix.
-* [ ] Input fully autocompleted string, ask for completions - Only full match
-* [ ] Input non-existent prefix, ask for completions - nothing
-* [ ] Input lowercase prefix - case insensitive completions
-
-== Smart Autocompletion ==
-* [ ] Input empty string and check if only keywords are returned.
-* [ ] Input SELECT prefix and check if only columns are returned.
-* [ ] Input SELECT blah - only keywords are returned.
-* [ ] Input SELECT * FROM - Table names only
-
-== PGSpecial ==
-* [ ] Test \d
-* [ ] Test \d tablename
-* [ ] Test \d tablena*
-* [ ] Test \d non-existent-tablename
-* [ ] Test \d index
-* [ ] Test \d sequence
-* [ ] Test \d view
-
-== Exceptionals ==
-* [ ] Test the 'use' command to change db.
@@ -1,8 +1,6 @@
 import pytest
-import psycopg2
-import psycopg2.extras
+import psycopg
 from pgcli.main import format_output, OutputSettings
-from pgcli.pgexecute import register_json_typecasters
 from os import getenv

 POSTGRES_USER = getenv("PGUSER", "postgres")
@@ -12,12 +10,12 @@ POSTGRES_PASSWORD = getenv("PGPASSWORD", "postgres")


 def db_connection(dbname=None):
-    conn = psycopg2.connect(
+    conn = psycopg.connect(
         user=POSTGRES_USER,
         host=POSTGRES_HOST,
         password=POSTGRES_PASSWORD,
         port=POSTGRES_PORT,
-        database=dbname,
+        dbname=dbname,
     )
     conn.autocommit = True
     return conn
@@ -26,11 +24,10 @@ def db_connection(dbname=None):
 try:
     conn = db_connection()
     CAN_CONNECT_TO_DB = True
-    SERVER_VERSION = conn.server_version
-    json_types = register_json_typecasters(conn, lambda x: x)
-    JSON_AVAILABLE = "json" in json_types
-    JSONB_AVAILABLE = "jsonb" in json_types
-except:
+    SERVER_VERSION = conn.info.parameter_status("server_version")
+    JSON_AVAILABLE = True
+    JSONB_AVAILABLE = True
+except Exception as x:
     CAN_CONNECT_TO_DB = JSON_AVAILABLE = JSONB_AVAILABLE = False
     SERVER_VERSION = 0
