Mirror of https://github.com/osm-search/Nominatim.git (synced 2024-11-25 19:35:02 +03:00)
Merge pull request #2282 from lonvia/add-paths-to-config
Include software paths in Python config object
commit 6f6910101e
@@ -84,6 +84,11 @@ class CommandlineParser:

         args.config = Configuration(args.project_dir, args.config_dir,
                                     environ=kwargs.get('environ', os.environ))
+        args.config.set_libdirs(module=args.module_dir,
+                                osm2pgsql=args.osm2pgsql_path,
+                                php=args.phplib_dir,
+                                sql=args.sqllib_dir,
+                                data=args.data_dir)

         log = logging.getLogger()
         log.warning('Using project directory: %s', str(args.project_dir))
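The command-line parser now pushes the installation paths it receives into the Configuration object right after creating it, so the individual commands no longer have to thread sqllib_dir, phplib_dir and friends through every call. A rough sketch of the same wiring outside the CLI (the import path and the directory values are placeholders, not taken from this commit):

    from pathlib import Path
    from nominatim.config import Configuration  # assumed module location

    # Build a configuration for a project directory and the default settings,
    # then record where the software components live.
    config = Configuration(Path('/srv/nominatim-project'),
                           Path('/usr/local/share/nominatim/settings'))
    config.set_libdirs(module='/usr/local/lib/nominatim/module',
                       osm2pgsql='/usr/local/bin/osm2pgsql',
                       php='/usr/local/share/nominatim/lib-php',
                       sql='/usr/local/share/nominatim/lib-sql',
                       data='/usr/local/share/nominatim/data')

    # Downstream code reads the paths back as absolute pathlib.Path objects:
    print(config.lib_dir.sql)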
@@ -143,11 +148,8 @@ class UpdateAddData:
     @staticmethod
     def run(args):
         if args.tiger_data:
-            return tiger_data.add_tiger_data(args.config.get_libpq_dsn(),
-                                             args.tiger_data,
-                                             args.threads or 1,
-                                             args.config,
-                                             args.sqllib_dir)
+            return tiger_data.add_tiger_data(args.tiger_data,
+                                             args.config, args.threads or 1)

         params = ['update.php']
         if args.file:
@@ -64,7 +64,7 @@ class UpdateRefresh:
         if args.functions:
             LOG.warning('Create functions')
             with connect(args.config.get_libpq_dsn()) as conn:
-                refresh.create_functions(conn, args.config, args.sqllib_dir,
+                refresh.create_functions(conn, args.config,
                                          args.diffs, args.enable_debug_statements)

         if args.wiki_data:
@@ -85,6 +85,6 @@ class UpdateRefresh:
         if args.website:
             webdir = args.project_dir / 'website'
             LOG.warning('Setting up website directory at %s', webdir)
-            refresh.setup_website(webdir, args.phplib_dir, args.config)
+            refresh.setup_website(webdir, args.config)

         return 0
@@ -55,8 +55,7 @@ class UpdateReplication:
             replication.init_replication(conn, base_url=args.config.REPLICATION_URL)
             if args.update_functions:
                 LOG.warning("Create functions")
-                refresh.create_functions(conn, args.config, args.sqllib_dir,
-                                         True, False)
+                refresh.create_functions(conn, args.config, True, False)
         return 0


@@ -81,22 +81,19 @@ class SetupAll:

         with connect(args.config.get_libpq_dsn()) as conn:
             LOG.warning('Create functions (1st pass)')
-            refresh.create_functions(conn, args.config, args.sqllib_dir,
-                                     False, False)
+            refresh.create_functions(conn, args.config, False, False)
             LOG.warning('Create tables')
-            database_import.create_tables(conn, args.config, args.sqllib_dir,
+            database_import.create_tables(conn, args.config,
                                           reverse_only=args.reverse_only)
             refresh.load_address_levels_from_file(conn, Path(args.config.ADDRESS_LEVEL_CONFIG))
             LOG.warning('Create functions (2nd pass)')
-            refresh.create_functions(conn, args.config, args.sqllib_dir,
-                                     False, False)
+            refresh.create_functions(conn, args.config, False, False)
             LOG.warning('Create table triggers')
-            database_import.create_table_triggers(conn, args.config, args.sqllib_dir)
+            database_import.create_table_triggers(conn, args.config)
             LOG.warning('Create partition tables')
-            database_import.create_partition_tables(conn, args.config, args.sqllib_dir)
+            database_import.create_partition_tables(conn, args.config)
             LOG.warning('Create functions (3rd pass)')
-            refresh.create_functions(conn, args.config, args.sqllib_dir,
-                                     False, False)
+            refresh.create_functions(conn, args.config, False, False)

         LOG.warning('Importing wikipedia importance data')
         data_path = Path(args.config.WIKIPEDIA_DATA_PATH or args.project_dir)
@@ -130,14 +127,13 @@ class SetupAll:
         LOG.warning('Post-process tables')
         with connect(args.config.get_libpq_dsn()) as conn:
             database_import.create_search_indices(conn, args.config,
-                                                  args.sqllib_dir,
                                                   drop=args.no_updates)
             LOG.warning('Create search index for default country names.')
             database_import.create_country_names(conn, args.config)

         webdir = args.project_dir / 'website'
         LOG.warning('Setup website at %s', webdir)
-        refresh.setup_website(webdir, args.phplib_dir, args.config)
+        refresh.setup_website(webdir, args.config)

         with connect(args.config.get_libpq_dsn()) as conn:
             try:
@@ -39,6 +39,16 @@ class Configuration:
         self._config['NOMINATIM_ADDRESS_LEVEL_CONFIG'] = \
             str(config_dir / 'address-levels.json')

+        class _LibDirs: # pylint: disable=too-few-public-methods
+            pass
+
+        self.lib_dir = _LibDirs()
+
+    def set_libdirs(self, **kwargs):
+        """ Set paths to library functions and data.
+        """
+        for key, value in kwargs.items():
+            setattr(self.lib_dir, key, Path(value).resolve())

     def __getattr__(self, name):
         name = 'NOMINATIM_' + name
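set_libdirs() simply stores every keyword argument as a resolved Path on the lib_dir namespace object, so callers may hand in strings or relative paths and always read back absolute Path objects. A self-contained illustration of that pattern (plain Python, not the Nominatim class itself):

    from pathlib import Path

    class _LibDirs:              # bare namespace object, as in the hunk above
        pass

    lib_dir = _LibDirs()
    for key, value in dict(sql='lib-sql', php='lib-php').items():
        setattr(lib_dir, key, Path(value).resolve())

    print(lib_dir.sql)                # absolute path, e.g. /current/dir/lib-sql
    print(lib_dir.sql.is_absolute())  # True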
@@ -75,9 +75,9 @@ class SQLPreprocessor: # pylint: disable=too-few-public-methods
         and follows its syntax.
     """

-    def __init__(self, conn, config, sqllib_dir):
+    def __init__(self, conn, config):
         self.env = jinja2.Environment(autoescape=False,
-                                      loader=jinja2.FileSystemLoader(str(sqllib_dir)))
+                                      loader=jinja2.FileSystemLoader(str(config.lib_dir.sql)))

         db_info = {}
         db_info['partitions'] = _get_partitions(conn)
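The SQL preprocessor now takes its template directory from config.lib_dir.sql instead of a separate constructor argument. Underneath it is still a Jinja2 environment with a FileSystemLoader; a minimal stand-alone sketch of that mechanism (independent of the Nominatim classes, assuming a directory lib-sql containing a tables.sql template):

    from pathlib import Path
    import jinja2

    sql_dir = Path('lib-sql')   # stands in for config.lib_dir.sql
    env = jinja2.Environment(autoescape=False,
                             loader=jinja2.FileSystemLoader(str(sql_dir)))

    # Render a template with the kind of globals the preprocessor provides.
    rendered = env.get_template('tables.sql').render(db={'reverse_only': False})
    print(rendered)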
@@ -179,29 +179,29 @@ def import_osm_data(osm_file, options, drop=False, ignore_errors=False):
             Path(options['flatnode_file']).unlink()


-def create_tables(conn, config, sqllib_dir, reverse_only=False):
+def create_tables(conn, config, reverse_only=False):
     """ Create the set of basic tables.
         When `reverse_only` is True, then the main table for searching will
         be skipped and only reverse search is possible.
     """
-    sql = SQLPreprocessor(conn, config, sqllib_dir)
+    sql = SQLPreprocessor(conn, config)
     sql.env.globals['db']['reverse_only'] = reverse_only

     sql.run_sql_file(conn, 'tables.sql')


-def create_table_triggers(conn, config, sqllib_dir):
+def create_table_triggers(conn, config):
     """ Create the triggers for the tables. The trigger functions must already
         have been imported with refresh.create_functions().
     """
-    sql = SQLPreprocessor(conn, config, sqllib_dir)
+    sql = SQLPreprocessor(conn, config)
     sql.run_sql_file(conn, 'table-triggers.sql')


-def create_partition_tables(conn, config, sqllib_dir):
+def create_partition_tables(conn, config):
     """ Create tables that have explicit partitioning.
     """
-    sql = SQLPreprocessor(conn, config, sqllib_dir)
+    sql = SQLPreprocessor(conn, config)
     sql.run_sql_file(conn, 'partition-tables.src.sql')


@@ -287,7 +287,7 @@ def load_data(dsn, data_dir, threads):
         cur.execute('ANALYSE')


-def create_search_indices(conn, config, sqllib_dir, drop=False):
+def create_search_indices(conn, config, drop=False):
     """ Create tables that have explicit partitioning.
     """

@@ -303,7 +303,7 @@ def create_search_indices(conn, config, sqllib_dir, drop=False):
                 cur.execute('DROP INDEX "{}"'.format(idx))
         conn.commit()

-    sql = SQLPreprocessor(conn, config, sqllib_dir)
+    sql = SQLPreprocessor(conn, config)

     sql.run_sql_file(conn, 'indices.sql', drop=drop)

@@ -47,7 +47,7 @@ def migrate(config, paths):

         if has_run_migration:
             LOG.warning('Updating SQL functions.')
-            refresh.create_functions(conn, config, paths.sqllib_dir)
+            refresh.create_functions(conn, config)

         properties.set_property(conn, 'database_version',
                                 '{0[0]}.{0[1]}.{0[2]}-{0[3]}'.format(NOMINATIM_VERSION))
@@ -77,11 +77,10 @@ def load_address_levels_from_file(conn, config_file):
         load_address_levels(conn, 'address_levels', json.load(fdesc))


-def create_functions(conn, config, sqllib_dir,
-                     enable_diff_updates=True, enable_debug=False):
+def create_functions(conn, config, enable_diff_updates=True, enable_debug=False):
     """ (Re)create the PL/pgSQL functions.
     """
-    sql = SQLPreprocessor(conn, config, sqllib_dir)
+    sql = SQLPreprocessor(conn, config)

     sql.run_sql_file(conn, 'functions.sql',
                      disable_diff_updates=not enable_diff_updates,
@@ -165,7 +164,7 @@ def recompute_importance(conn):
     conn.commit()


-def setup_website(basedir, phplib_dir, config):
+def setup_website(basedir, config):
     """ Create the website script stubs.
     """
     if not basedir.exists():
@@ -179,7 +178,7 @@ def setup_website(basedir, phplib_dir, config):
     @define('CONST_LibDir', '{0}');
     @define('CONST_NominatimVersion', '{1[0]}.{1[1]}.{1[2]}-{1[3]}');

-    """.format(phplib_dir, NOMINATIM_VERSION))
+    """.format(config.lib_dir.php, NOMINATIM_VERSION))

     for php_name, conf_name, var_type in PHP_CONST_DEFS:
         if var_type == bool:
@@ -193,7 +192,7 @@ def setup_website(basedir, phplib_dir, config):

         template += "@define('CONST_{}', {});\n".format(php_name, varout)

-    template += "\nrequire_once('{}/website/{{}}');\n".format(phplib_dir)
+    template += "\nrequire_once('{}/website/{{}}');\n".format(config.lib_dir.php)

     for script in WEBSITE_SCRIPTS:
         (basedir / script).write_text(template.format(script), 'utf-8')
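setup_website() now reads the PHP library location from the config as well; the test changes further down in this commit set config.lib_dir.php explicitly before calling it. A condensed usage sketch along those lines (paths are placeholders, import locations assumed from the Nominatim source tree):

    from pathlib import Path
    from nominatim.config import Configuration   # assumed module location
    from nominatim.tools import refresh          # assumed module location

    cfg = Configuration(None, Path('/usr/local/share/nominatim/settings'))
    cfg.lib_dir.php = Path('/usr/local/share/nominatim/lib-php')   # placeholder

    # Old call: refresh.setup_website(webdir, phplib_dir, cfg)
    refresh.setup_website(Path('/srv/nominatim-project/website'), cfg)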
@@ -76,17 +76,17 @@ def handle_unregister_connection_pool(sel, place_threads):
             conn.close()
             place_threads -= 1

-def add_tiger_data(dsn, data_dir, threads, config, sqllib_dir):
-    """ Import tiger data from directory or tar file
+def add_tiger_data(data_dir, config, threads):
+    """ Import tiger data from directory or tar file `data dir`.
     """
-
+    dsn = config.get_libpq_dsn()
     sql_files, tar = handle_tarfile_or_directory(data_dir)

     if not sql_files:
         return

     with connect(dsn) as conn:
-        sql = SQLPreprocessor(conn, config, sqllib_dir)
+        sql = SQLPreprocessor(conn, config)
         sql.run_sql_file(conn, 'tiger_import_start.sql')

         # Reading sql_files and then for each file line handling
@@ -116,5 +116,5 @@ def add_tiger_data(dsn, data_dir, threads, config, sqllib_dir):
     print('\n')
     LOG.warning("Creating indexes on Tiger data")
     with connect(dsn) as conn:
-        sql = SQLPreprocessor(conn, config, sqllib_dir)
+        sql = SQLPreprocessor(conn, config)
         sql.run_sql_file(conn, 'tiger_import_finish.sql')
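For the TIGER import the DSN now comes from config.get_libpq_dsn() and the SQL templates from config.lib_dir.sql, so the function signature shrinks to data directory, config and thread count. Roughly, with placeholder paths (import locations assumed from the Nominatim source tree):

    from pathlib import Path
    from nominatim.config import Configuration   # assumed module location
    from nominatim.tools import tiger_data       # module used by this commit's tests

    config = Configuration(Path('/srv/nominatim-project'),
                           Path('/usr/local/share/nominatim/settings'))
    config.set_libdirs(module='.', osm2pgsql='.', php='lib-php',
                       sql='lib-sql', data='data')      # placeholder locations

    # Old call: tiger_data.add_tiger_data(dsn, '/data/tiger', 4, config, sqllib_dir)
    # New call: DSN and SQL directory are looked up on the config object.
    tiger_data.add_tiger_data('/data/tiger', config, 4)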
@@ -107,7 +107,8 @@ class NominatimEnvironment:

         self.website_dir = tempfile.TemporaryDirectory()
         cfg = Configuration(None, self.src_dir / 'settings', environ=self.test_env)
-        refresh.setup_website(Path(self.website_dir.name) / 'website', self.src_dir / 'lib-php', cfg)
+        cfg.lib_dir.php = self.src_dir / 'lib-php'
+        refresh.setup_website(Path(self.website_dir.name) / 'website', cfg)

     def get_libpq_dsn(self):
         dsn = self.test_env['NOMINATIM_DATABASE_DSN']
@@ -126,7 +126,12 @@ def table_factory(temp_db_cursor):

 @pytest.fixture
 def def_config():
-    return Configuration(None, SRC_DIR.resolve() / 'settings')
+    cfg = Configuration(None, SRC_DIR.resolve() / 'settings')
+    cfg.set_libdirs(module='.', osm2pgsql='.',
+                    php=SRC_DIR / 'lib-php',
+                    sql=SRC_DIR / 'lib-sql',
+                    data=SRC_DIR / 'data')
+    return cfg

 @pytest.fixture
 def src_dir():
@@ -275,7 +280,11 @@ def osm2pgsql_options(temp_db):
                       main_data='', main_index=''))

 @pytest.fixture
-def sql_preprocessor(temp_db_conn, tmp_path, def_config, monkeypatch, table_factory):
+def sql_preprocessor(temp_db_conn, tmp_path, monkeypatch, table_factory):
     monkeypatch.setenv('NOMINATIM_DATABASE_MODULE_PATH', '.')
     table_factory('country_name', 'partition INT', (0, 1, 2))
-    return SQLPreprocessor(temp_db_conn, def_config, tmp_path)
+    cfg = Configuration(None, SRC_DIR.resolve() / 'settings')
+    cfg.set_libdirs(module='.', osm2pgsql='.', php=SRC_DIR / 'lib-php',
+                    sql=tmp_path, data=SRC_DIR / 'data')
+
+    return SQLPreprocessor(temp_db_conn, cfg)

@@ -5,6 +5,11 @@ import pytest

 from nominatim.tools.refresh import create_functions

+@pytest.fixture
+def sql_tmp_path(tmp_path, def_config):
+    def_config.lib_dir.sql = tmp_path
+    return tmp_path
+
 @pytest.fixture
 def conn(temp_db_conn, table_factory, monkeypatch):
     monkeypatch.setenv('NOMINATIM_DATABASE_MODULE_PATH', '.')
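Because the SQL directory now lives on the config object, the tests simply point def_config.lib_dir.sql at a temporary directory instead of handing tmp_path to every call. Stripped down to the essential lines (a hypothetical fragment in the style of these tests; def_config, tmp_path, conn and temp_db_cursor are the pytest fixtures defined in this suite):

    def test_create_trivial_function(temp_db_cursor, conn, def_config, tmp_path):
        def_config.lib_dir.sql = tmp_path          # redirect the preprocessor to test SQL
        (tmp_path / 'functions.sql').write_text(
            "CREATE OR REPLACE FUNCTION test() RETURNS INTEGER AS $$ "
            "BEGIN RETURN 1; END; $$ LANGUAGE plpgsql;")

        create_functions(conn, def_config)          # picks up tmp_path via lib_dir.sql

        assert temp_db_cursor.scalar('SELECT test()') == 1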
@@ -12,8 +17,8 @@ def conn(temp_db_conn, table_factory, monkeypatch):
     return temp_db_conn


-def test_create_functions(temp_db_cursor, conn, def_config, tmp_path):
-    sqlfile = tmp_path / 'functions.sql'
+def test_create_functions(temp_db_cursor, conn, def_config, sql_tmp_path):
+    sqlfile = sql_tmp_path / 'functions.sql'
     sqlfile.write_text("""CREATE OR REPLACE FUNCTION test() RETURNS INTEGER
                           AS $$
                           BEGIN
|
||||
$$ LANGUAGE plpgsql IMMUTABLE;
|
||||
""")
|
||||
|
||||
create_functions(conn, def_config, tmp_path)
|
||||
create_functions(conn, def_config)
|
||||
|
||||
assert temp_db_cursor.scalar('SELECT test()') == 43
|
||||
|
||||
|
||||
@pytest.mark.parametrize("dbg,ret", ((True, 43), (False, 22)))
|
||||
def test_create_functions_with_template(temp_db_cursor, conn, def_config, tmp_path, dbg, ret):
|
||||
sqlfile = tmp_path / 'functions.sql'
|
||||
def test_create_functions_with_template(temp_db_cursor, conn, def_config, sql_tmp_path, dbg, ret):
|
||||
sqlfile = sql_tmp_path / 'functions.sql'
|
||||
sqlfile.write_text("""CREATE OR REPLACE FUNCTION test() RETURNS INTEGER
|
||||
AS $$
|
||||
BEGIN
|
||||
@ -42,6 +47,6 @@ def test_create_functions_with_template(temp_db_cursor, conn, def_config, tmp_pa
|
||||
$$ LANGUAGE plpgsql IMMUTABLE;
|
||||
""")
|
||||
|
||||
create_functions(conn, def_config, tmp_path, enable_debug=dbg)
|
||||
create_functions(conn, def_config, enable_debug=dbg)
|
||||
|
||||
assert temp_db_cursor.scalar('SELECT test()') == ret
|
||||
|
@@ -25,7 +25,8 @@ def test_script(envdir):


 def run_website_script(envdir, config):
-    refresh.setup_website(envdir, envdir / 'php', config)
+    config.lib_dir.php = envdir / 'php'
+    refresh.setup_website(envdir, config)

     proc = subprocess.run(['/usr/bin/env', 'php', '-Cq',
                            envdir / 'search.php'], check=False)
@@ -10,35 +10,31 @@ from nominatim.tools import tiger_data, database_import


 @pytest.mark.parametrize("threads", (1, 5))
-def test_add_tiger_data(dsn, src_dir, def_config, tmp_path, sql_preprocessor,
-                        temp_db_cursor, threads, temp_db):
-    temp_db_cursor.execute('CREATE EXTENSION hstore')
-    temp_db_cursor.execute('CREATE EXTENSION postgis')
+def test_add_tiger_data(def_config, tmp_path, sql_preprocessor,
+                        temp_db_cursor, threads, temp_db_with_extensions):
     temp_db_cursor.execute('CREATE TABLE place (id INT)')
     sqlfile = tmp_path / '1010.sql'
     sqlfile.write_text("""INSERT INTO place values (1);
                           INSERT INTO non_existant_table values (1);""")
-    tiger_data.add_tiger_data(dsn, str(tmp_path), threads, def_config, src_dir / 'lib-sql')
+    tiger_data.add_tiger_data(str(tmp_path), def_config, threads)

     assert temp_db_cursor.table_rows('place') == 1


 @pytest.mark.parametrize("threads", (1, 5))
-def test_add_tiger_data_bad_file(dsn, src_dir, def_config, tmp_path, sql_preprocessor,
-                                 temp_db_cursor, threads, temp_db):
-    temp_db_cursor.execute('CREATE EXTENSION hstore')
-    temp_db_cursor.execute('CREATE EXTENSION postgis')
+def test_add_tiger_data_bad_file(def_config, tmp_path, sql_preprocessor,
+                                 temp_db_cursor, threads, temp_db_with_extensions):
     temp_db_cursor.execute('CREATE TABLE place (id INT)')
     sqlfile = tmp_path / '1010.txt'
     sqlfile.write_text("""Random text""")
-    tiger_data.add_tiger_data(dsn, str(tmp_path), threads, def_config, src_dir / 'lib-sql')
+    tiger_data.add_tiger_data(str(tmp_path), def_config, threads)

     assert temp_db_cursor.table_rows('place') == 0


 @pytest.mark.parametrize("threads", (1, 5))
-def test_add_tiger_data_tarfile(dsn, src_dir, def_config, tmp_path,
-                                temp_db_cursor, threads, temp_db, sql_preprocessor):
-    temp_db_cursor.execute('CREATE EXTENSION hstore')
-    temp_db_cursor.execute('CREATE EXTENSION postgis')
+def test_add_tiger_data_tarfile(def_config, tmp_path, temp_db_cursor,
+                                threads, temp_db_with_extensions, sql_preprocessor):
     temp_db_cursor.execute('CREATE TABLE place (id INT)')
     sqlfile = tmp_path / '1010.sql'
     sqlfile.write_text("""INSERT INTO place values (1);
@@ -46,21 +42,20 @@ def test_add_tiger_data_tarfile(dsn, src_dir, def_config, tmp_path,
     tar = tarfile.open("sample.tar.gz", "w:gz")
     tar.add(sqlfile)
     tar.close()
-    tiger_data.add_tiger_data(dsn, str(src_dir / 'sample.tar.gz'), threads, def_config, src_dir / 'lib-sql')
+
+    tiger_data.add_tiger_data(str(tmp_path), def_config, threads)

     assert temp_db_cursor.table_rows('place') == 1


 @pytest.mark.parametrize("threads", (1, 5))
-def test_add_tiger_data_bad_tarfile(dsn, src_dir, def_config, tmp_path,
-                                    temp_db_cursor, threads, temp_db, sql_preprocessor):
-    temp_db_cursor.execute('CREATE EXTENSION hstore')
-    temp_db_cursor.execute('CREATE EXTENSION postgis')
+def test_add_tiger_data_bad_tarfile(def_config, tmp_path, temp_db_cursor, threads,
+                                    temp_db_with_extensions, sql_preprocessor):
     temp_db_cursor.execute('CREATE TABLE place (id INT)')
     sqlfile = tmp_path / '1010.txt'
     sqlfile.write_text("""Random text""")
     tar = tarfile.open("sample.tar.gz", "w:gz")
     tar.add(sqlfile)
     tar.close()
-    tiger_data.add_tiger_data(dsn, str(src_dir / 'sample.tar.gz'), threads, def_config, src_dir / 'lib-sql')
-
-    assert temp_db_cursor.table_rows('place') == 0
+    tiger_data.add_tiger_data(str(tmp_path), def_config, threads)

     assert temp_db_cursor.table_rows('place') == 0