# SPDX-License-Identifier: GPL-3.0-or-later
#
# This file is part of Nominatim. (https://nominatim.org)
#
# Copyright (C) 2024 by the Nominatim developer community.
# For a full list of authors see the git log.
import itertools
import sys
from pathlib import Path

import psycopg2
import psycopg2.extras  # needed for register_hstore() in the place_row fixture
import pytest

# always test against the source
SRC_DIR = (Path(__file__) / '..' / '..' / '..').resolve()
sys.path.insert(0, str(SRC_DIR / 'src'))

from nominatim_core.config import Configuration
from nominatim_core.db import connection
from nominatim_core.db.sql_preprocessor import SQLPreprocessor
import nominatim_db.tokenizer.factory

import dummy_tokenizer
import mocks
from cursor import CursorForTesting


@pytest.fixture
def src_dir():
    return SRC_DIR


@pytest.fixture
def temp_db(monkeypatch):
    """ Create an empty database for the test. The database name is also
        exported into NOMINATIM_DATABASE_DSN.
    """
    name = 'test_nominatim_python_unittest'
    conn = psycopg2.connect(database='postgres')
    conn.set_isolation_level(0)
    with conn.cursor() as cur:
        cur.execute('DROP DATABASE IF EXISTS {}'.format(name))
        cur.execute('CREATE DATABASE {}'.format(name))
    conn.close()

    monkeypatch.setenv('NOMINATIM_DATABASE_DSN', 'dbname=' + name)

    yield name

    conn = psycopg2.connect(database='postgres')
    conn.set_isolation_level(0)
    with conn.cursor() as cur:
        cur.execute('DROP DATABASE IF EXISTS {}'.format(name))
    conn.close()


@pytest.fixture
def dsn(temp_db):
    return 'dbname=' + temp_db


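# Example usage of the fixtures above (illustrative sketch only, not collected
# by pytest; shows how the exported DSN relates to the temporary database):
#
#   def test_dsn_connects_to_temp_db(dsn, temp_db):
#       conn = psycopg2.connect(dsn)
#       assert conn.get_dsn_parameters()['dbname'] == temp_db
#       conn.close()

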
@pytest.fixture
def temp_db_with_extensions(temp_db):
    conn = psycopg2.connect(database=temp_db)
    with conn.cursor() as cur:
        cur.execute('CREATE EXTENSION hstore; CREATE EXTENSION postgis;')
    conn.commit()
    conn.close()

    return temp_db


@pytest.fixture
def temp_db_conn(temp_db):
    """ Connection to the test database.
    """
    with connection.connect('dbname=' + temp_db) as conn:
        yield conn


@pytest.fixture
def temp_db_cursor(temp_db):
    """ Connection and cursor towards the test database. The connection will
        be in auto-commit mode.
    """
    conn = psycopg2.connect('dbname=' + temp_db)
    conn.set_isolation_level(0)
    with conn.cursor(cursor_factory=CursorForTesting) as cur:
        yield cur
    conn.close()


@pytest.fixture
def table_factory(temp_db_cursor):
    """ A fixture that creates new SQL tables, potentially filled with
        content.
    """
    def mk_table(name, definition='id INT', content=None):
        temp_db_cursor.execute('CREATE TABLE {} ({})'.format(name, definition))
        if content is not None:
            temp_db_cursor.execute_values("INSERT INTO {} VALUES %s".format(name), content)

    return mk_table


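# Example usage of table_factory (illustrative sketch only, not collected by
# pytest; the table and column names are invented for the example):
#
#   def test_table_factory_fills_rows(table_factory, temp_db_cursor):
#       table_factory('test_data', 'id INT, name TEXT', ((1, 'foo'), (2, 'bar')))
#       temp_db_cursor.execute('SELECT count(*) FROM test_data')
#       assert temp_db_cursor.fetchone()[0] == 2

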
@pytest.fixture
def def_config():
    """ A Configuration object with default settings and library
        directories pointing to the current directory.
    """
    cfg = Configuration(None)
    cfg.set_libdirs(module='.', osm2pgsql='.')
    return cfg


@pytest.fixture
def project_env(tmp_path):
    """ A Configuration object for a freshly created project directory
        below tmp_path.
    """
    projdir = tmp_path / 'project'
    projdir.mkdir()
    cfg = Configuration(projdir)
    cfg.set_libdirs(module='.', osm2pgsql='.')
    return cfg


@pytest.fixture
def property_table(table_factory, temp_db_conn):
    """ Create an empty nominatim_properties table and return a mock
        accessor for it.
    """
    table_factory('nominatim_properties', 'property TEXT, value TEXT')

    return mocks.MockPropertyTable(temp_db_conn)


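# Example usage of property_table (illustrative sketch only; set() is used by
# the tokenizer_mock fixture further below, while a matching get() accessor on
# mocks.MockPropertyTable is assumed here for the round-trip check):
#
#   def test_property_roundtrip(property_table):
#       property_table.set('database_version', '4.4.0')
#       assert property_table.get('database_version') == '4.4.0'

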
@pytest.fixture
def status_table(table_factory):
    """ Create an empty version of the status table and
        the status logging table.
    """
    table_factory('import_status',
                  """lastimportdate timestamp with time zone NOT NULL,
                     sequence_id integer,
                     indexed boolean""")
    table_factory('import_osmosis_log',
                  """batchend timestamp,
                     batchseq integer,
                     batchsize bigint,
                     starttime timestamp,
                     endtime timestamp,
                     event text""")


@pytest.fixture
def place_table(temp_db_with_extensions, table_factory):
    """ Create an empty version of the place table.
    """
    table_factory('place',
                  """osm_id int8 NOT NULL,
                     osm_type char(1) NOT NULL,
                     class text NOT NULL,
                     type text NOT NULL,
                     name hstore,
                     admin_level smallint,
                     address hstore,
                     extratags hstore,
                     geometry Geometry(Geometry,4326) NOT NULL""")


@pytest.fixture
def place_row(place_table, temp_db_cursor):
    """ A factory for rows in the place table. The table is created as a
        prerequisite to the fixture.
    """
    psycopg2.extras.register_hstore(temp_db_cursor)
    idseq = itertools.count(1001)

    def _insert(osm_type='N', osm_id=None, cls='amenity', typ='cafe', names=None,
                admin_level=None, address=None, extratags=None, geom=None):
        temp_db_cursor.execute("INSERT INTO place VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)",
                               (osm_id or next(idseq), osm_type, cls, typ, names,
                                admin_level, address, extratags,
                                geom or 'SRID=4326;POINT(0 0)'))

    return _insert


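# Example usage of place_row (illustrative sketch only; the OSM id and name
# are invented and rely on the defaults cls='amenity', typ='cafe' above):
#
#   def test_place_row_inserts_defaults(place_row, temp_db_cursor):
#       place_row(osm_id=45, names={'name': 'Test Cafe'})
#       temp_db_cursor.execute("SELECT count(*) FROM place WHERE class = 'amenity'")
#       assert temp_db_cursor.fetchone()[0] == 1

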
@pytest.fixture
def placex_table(temp_db_with_extensions, temp_db_conn):
    """ Create an empty version of the placex table.
    """
    return mocks.MockPlacexTable(temp_db_conn)


@pytest.fixture
def osmline_table(temp_db_with_extensions, table_factory):
    table_factory('location_property_osmline',
                  """place_id BIGINT,
                     osm_id BIGINT,
                     parent_place_id BIGINT,
                     geometry_sector INTEGER,
                     indexed_date TIMESTAMP,
                     startnumber INTEGER,
                     endnumber INTEGER,
                     partition SMALLINT,
                     indexed_status SMALLINT,
                     linegeo GEOMETRY,
                     interpolationtype TEXT,
                     address HSTORE,
                     postcode TEXT,
                     country_code VARCHAR(2)""")


@pytest.fixture
def sql_preprocessor_cfg(tmp_path, table_factory, temp_db_with_extensions):
    table_factory('country_name', 'partition INT', ((0, ), (1, ), (2, )))
    cfg = Configuration(None)
    cfg.set_libdirs(module='.', osm2pgsql='.', sql=tmp_path)
    return cfg


@pytest.fixture
def sql_preprocessor(sql_preprocessor_cfg, temp_db_conn):
    return SQLPreprocessor(temp_db_conn, sql_preprocessor_cfg)


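# Example usage of sql_preprocessor (illustrative sketch only; it assumes that
# SQLPreprocessor.run_sql_file() renders and executes templates from the SQL
# directory configured above, which sql_preprocessor_cfg points at tmp_path):
#
#   def test_sql_preprocessor_creates_function(sql_preprocessor, temp_db_conn, tmp_path):
#       (tmp_path / 'dummy.sql').write_text(
#           'CREATE OR REPLACE FUNCTION dummy() RETURNS int AS $$ SELECT 1 $$ LANGUAGE SQL')
#       sql_preprocessor.run_sql_file(temp_db_conn, 'dummy.sql')

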
@pytest.fixture
def tokenizer_mock(monkeypatch, property_table):
    """ Sets up the configuration so that the test dummy tokenizer will be
        loaded when the tokenizer factory is used. Also returns a factory
        with which a new dummy tokenizer may be created.
    """
    monkeypatch.setenv('NOMINATIM_TOKENIZER', 'dummy')

    def _import_dummy(*args, **kwargs):
        return dummy_tokenizer

    monkeypatch.setattr(nominatim_db.tokenizer.factory,
                        "_import_tokenizer", _import_dummy)
    property_table.set('tokenizer', 'dummy')

    def _create_tokenizer():
        return dummy_tokenizer.DummyTokenizer(None, None)

    return _create_tokenizer
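

# Example usage of tokenizer_mock (illustrative sketch only; it relies solely
# on the fixture's return value, a factory for dummy tokenizer instances):
#
#   def test_tokenizer_mock_factory(tokenizer_mock):
#       tokenizer = tokenizer_mock()
#       assert isinstance(tokenizer, dummy_tokenizer.DummyTokenizer)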