mirror of
https://github.com/osm-search/Nominatim.git
synced 2024-09-21 07:58:07 +03:00
Do not hide errors when importing the tokenizer
Explicitly check for the tokenizer source file to check that the name is correct. We can't use the import error for that because it hides other import errors like a missing library. Fixes #2327.
This commit is contained in:
parent
54b06d7abc
commit
b2722650d4
@ -15,6 +15,7 @@ normalizer module is installed, when the tokenizer is created.
|
||||
"""
|
||||
import logging
|
||||
import importlib
|
||||
from pathlib import Path
|
||||
|
||||
from ..errors import UsageError
|
||||
from ..db import properties
|
||||
@ -25,12 +26,13 @@ LOG = logging.getLogger()
|
||||
def _import_tokenizer(name):
    """ Load the tokenizer.py module from the project directory.

        Parameters:
            name: base name of the tokenizer; the module loaded is
                  ``nominatim.tokenizer.<name>_tokenizer``.

        Returns:
            The imported tokenizer module.

        Raises:
            UsageError: if no tokenizer of the given name exists.

        The existence of the tokenizer is checked explicitly via its
        source file instead of catching ModuleNotFoundError from the
        import. Catching the import error would also swallow unrelated
        failures raised while importing a perfectly valid tokenizer
        module (e.g. a missing third-party library it depends on) and
        misreport them as "Tokenizer not found".
    """
    src_file = Path(__file__).parent / (name + '_tokenizer.py')
    if not src_file.is_file():
        LOG.fatal("No tokenizer named '%s' available. "
                  "Check the setting of NOMINATIM_TOKENIZER.", name)
        raise UsageError('Tokenizer not found')

    # The file exists, so any exception escaping from here is a real
    # import problem and must propagate to the caller unchanged.
    return importlib.import_module('nominatim.tokenizer.' + name + '_tokenizer')
|
||||
|
||||
|
||||
def create_tokenizer(config, init_db=True, module_name=None):
|
||||
|
@ -17,6 +17,7 @@ from nominatim.config import Configuration
|
||||
from nominatim.db import connection
|
||||
from nominatim.db.sql_preprocessor import SQLPreprocessor
|
||||
from nominatim.db import properties
|
||||
import nominatim.tokenizer.factory
|
||||
|
||||
import dummy_tokenizer
|
||||
import mocks
|
||||
@ -273,7 +274,7 @@ def tokenizer_mock(monkeypatch, property_table, temp_db_conn, tmp_path):
|
||||
def _import_dummy(module, *args, **kwargs):
    # Test stub replacing factory._import_tokenizer: ignore the requested
    # module name and always hand back the dummy tokenizer module.
    return dummy_tokenizer
|
||||
|
||||
monkeypatch.setattr(importlib, "import_module", _import_dummy)
|
||||
monkeypatch.setattr(nominatim.tokenizer.factory, "_import_tokenizer", _import_dummy)
|
||||
properties.set_property(temp_db_conn, 'tokenizer', 'dummy')
|
||||
|
||||
def _create_tokenizer():
|
||||
|
Loading…
Reference in New Issue
Block a user