add tests for cleaning housenumbers

This commit is contained in:
Sarah Hoffmann 2022-01-20 23:47:20 +01:00
parent 3ce123ab69
commit c170d323d9
7 changed files with 106 additions and 1 deletion

View File

@@ -309,12 +309,20 @@ jobs:
NOMINATIM_REPLICATION_MAX_DIFF=1 nominatim replication --once
working-directory: /home/nominatim/nominatim-project
- name: Clean up database
run: nominatim refresh --postcodes --word-tokens
working-directory: /home/nominatim/nominatim-project
- name: Run reverse-only import
run : |
echo 'NOMINATIM_DATABASE_DSN="pgsql:dbname=reverse"' >> .env
nominatim import --osm-file ../test.pbf --reverse-only --no-updates
working-directory: /home/nominatim/data-env-reverse
- name: Check reverse import
- name: Check reverse-only import
run: nominatim admin --check-database
working-directory: /home/nominatim/data-env-reverse
- name: Clean up database (reverse-only import)
run: nominatim refresh --postcodes --word-tokens
working-directory: /home/nominatim/nominatim-project

View File

@@ -79,6 +79,7 @@ class UpdateRefresh:
"Postcode updates on a frozen database is not possible.")
if args.word_tokens:
LOG.warning('Updating word tokens')
tokenizer = self._get_tokenizer(args.config)
tokenizer.update_word_tokens()

View File

@@ -30,6 +30,7 @@ class DummyTokenizer:
self.update_sql_functions_called = False
self.finalize_import_called = False
self.update_statistics_called = False
self.update_word_tokens_called = False
# Record that an SQL-function refresh was requested; tests assert on this flag.
def update_sql_functions(self, *args):
self.update_sql_functions_called = True
@@ -40,6 +41,9 @@ class DummyTokenizer:
# Record that a statistics update was requested; tests assert on this flag.
def update_statistics(self):
self.update_statistics_called = True
# Record that word-token cleanup was requested (hook added by this commit);
# tests assert on this flag.
def update_word_tokens(self):
self.update_word_tokens_called = True
@pytest.fixture
def cli_call(src_dir):

View File

@@ -39,6 +39,11 @@ class TestRefresh:
assert self.tokenizer_mock.update_statistics_called
# 'nominatim refresh --word-tokens' must exit 0 and delegate to the
# tokenizer's update_word_tokens() hook.
def test_refresh_word_tokens(self):
assert self.call_nominatim('refresh', '--word-tokens') == 0
assert self.tokenizer_mock.update_word_tokens_called
def test_refresh_postcodes(self, mock_func_factory, place_table):
func_mock = mock_func_factory(nominatim.tools.postcodes, 'update_postcodes')
idx_mock = mock_func_factory(nominatim.indexer.indexer.Indexer, 'index_postcodes')

View File

@@ -58,6 +58,14 @@ class MockIcuWordTable:
self.conn.commit()
# Insert a housenumber token (row of type 'H') into the mock word table.
def add_housenumber(self, word_id, word_token):
with self.conn.cursor() as cur:
cur.execute("""INSERT INTO word (word_id, word_token, type)
VALUES (%s, %s, 'H')
""", (word_id, word_token))
self.conn.commit()
# Total number of rows currently in the word table.
def count(self):
with self.conn.cursor() as cur:
return cur.scalar("SELECT count(*) FROM word")
@@ -68,6 +76,11 @@ class MockIcuWordTable:
return cur.scalar("SELECT count(*) FROM word WHERE type = 'S'")
# Number of housenumber tokens (type 'H') currently in the word table.
def count_housenumbers(self):
with self.conn.cursor() as cur:
return cur.scalar("SELECT count(*) FROM word WHERE type = 'H'")
def get_special(self):
with self.conn.cursor() as cur:
cur.execute("SELECT word_token, info, word FROM word WHERE type = 'S'")

View File

@@ -9,6 +9,7 @@ Tests for ICU tokenizer.
"""
import shutil
import yaml
import itertools
import pytest
@@ -554,3 +555,69 @@ class TestPlaceAddress:
assert 'addr' not in info
# Tests for the tokenizer's update_word_tokens() housenumber cleanup:
# housenumber tokens no longer referenced from placex or search_name
# must be removed from the word table, while referenced ones are kept.
class TestUpdateWordTokens:
@pytest.fixture(autouse=True)
def setup(self, tokenizer_factory, table_factory, placex_table, word_table):
# Minimal search_name stand-in: only the columns the cleanup reads.
table_factory('search_name', 'place_id BIGINT, name_vector INT[]')
self.tok = tokenizer_factory()
@pytest.fixture
def search_entry(self, temp_db_cursor):
# Factory inserting one search_name row per call; positional args
# become the name_vector and place_ids auto-increment from 1000.
place_id = itertools.count(1000)
def _insert(*args):
temp_db_cursor.execute("INSERT INTO search_name VALUES (%s, %s)",
(next(place_id), list(args)))
return _insert
# Unreferenced housenumber tokens are deleted. Note this includes the
# purely numeric '1234567' (7 digits) even though '5432' is kept below —
# presumably the tokenizer applies a length cutoff; confirm there.
@pytest.mark.parametrize('hnr', ('1a', '1234567', '34 5'))
def test_remove_unused_housenumbers(self, word_table, hnr):
word_table.add_housenumber(1000, hnr)
assert word_table.count_housenumbers() == 1
self.tok.update_word_tokens()
assert word_table.count_housenumbers() == 0
# A short plain-numeric housenumber is kept even when unreferenced.
def test_keep_unused_numeral_housenumbers(self, word_table):
word_table.add_housenumber(1000, '5432')
assert word_table.count_housenumbers() == 1
self.tok.update_word_tokens()
assert word_table.count_housenumbers() == 1
# Token 9999 is referenced from search_name.name_vector and survives;
# the unreferenced '9 a' token is purged.
def test_keep_housenumbers_from_search_name_table(self, word_table, search_entry):
word_table.add_housenumber(9999, '5432a')
word_table.add_housenumber(9991, '9 a')
search_entry(123, 9999, 34)
assert word_table.count_housenumbers() == 2
self.tok.update_word_tokens()
assert word_table.count_housenumbers() == 1
# '34z' appears as a placex housenumber and survives; '25432a' is not an
# exact match for '5432a', so that token is purged.
def test_keep_housenumbers_from_placex_table(self, word_table, placex_table):
word_table.add_housenumber(9999, '5432a')
word_table.add_housenumber(9990, '34z')
placex_table.add(housenumber='34z')
placex_table.add(housenumber='25432a')
assert word_table.count_housenumbers() == 2
self.tok.update_word_tokens()
assert word_table.count_housenumbers() == 1
# Semicolon-separated housenumber lists in placex are split before
# matching: '9 b' is found inside the list, '34z' is not and is purged.
def test_keep_housenumbers_from_placex_table_hnr_list(self, word_table, placex_table):
word_table.add_housenumber(9991, '9 b')
word_table.add_housenumber(9990, '34z')
placex_table.add(housenumber='9 a;9 b;9 c')
assert word_table.count_housenumbers() == 2
self.tok.update_word_tokens()
assert word_table.count_housenumbers() == 1

View File

@@ -257,6 +257,13 @@ def test_update_statistics(word_table, table_factory, temp_db_cursor, tokenizer_
search_name_count > 0""") > 0
# The legacy tokenizer has no word-token cleanup; update_word_tokens()
# must still be callable without error.
def test_update_word_tokens(tokenizer_factory):
tok = tokenizer_factory()
# This is a noop and should just pass.
tok.update_word_tokens()
# Normalization lower-cases its input.
def test_normalize(analyzer):
assert analyzer.normalize('TEsT') == 'test'