simple search steps

Sarah Hoffmann 2016-11-20 17:30:54 +01:00
parent c56c09e2c0
commit 47f94c6988
8 changed files with 277 additions and 71 deletions


@@ -0,0 +1,16 @@
@DB
Feature: Import of simple objects
Testing simple stuff
@wip
Scenario: Import place node
Given the places
| osm | class | type | name | name+ref | geometry |
| N1 | place | village | Foo | 32 | 10.0 -10.0 |
And the named places
| osm | class | type | housenr |
| N2 | place | village | |
When importing
Then placex contains
| object | class | type | name | name+ref | centroid*10 |
| N1 | place | village | Foo | 32 | 1 -1 |
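The column headings of these tables are interpreted by the new PlaceColumn step code further down in this commit: "housenr" is shorthand for the housenumber column, and "name+ref" puts a "ref" entry into the name hstore. A rough standalone sketch (not the committed code) of that mapping:

    def map_heading(columns, key, value):
        # mirrors PlaceColumn.add(): plain 'name' and 'name+<key>' go into the
        # name hstore, 'extra+<key>' into extratags, 'housenr' is housenumber
        if key == 'name':
            columns.setdefault('name', {})['name'] = value
        elif key == 'housenr':
            columns['housenumber'] = value
        elif key.startswith('name+'):
            columns.setdefault('name', {})[key[5:]] = value
        elif key.startswith('extra+'):
            columns.setdefault('extratags', {})[key[6:]] = value
        else:
            columns[key] = value

    cols = {}
    for k, v in (('class', 'place'), ('type', 'village'), ('name', 'Foo'), ('name+ref', '32')):
        map_heading(cols, k, v)
    print(cols)
    # {'class': 'place', 'type': 'village', 'name': {'name': 'Foo', 'ref': '32'}}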


@@ -1,20 +0,0 @@
@DB
Feature: Import of simple objects
Testing simple stuff
@wip
Scenario: Import place node
Given the places
| osm | class | type | name | geometry |
| N1 | place | village | 'name' : 'Foo' | 10.0 -10.0 |
And the named places
| osm | class | type | housenumber |
| N2 | place | village | |
When importing
Then table placex contains
| object | class | type | name | centroid |
| N1 | place | village | 'name' : 'Foo' | 10.0,-10.0 +- 1m |
When sending query "Foo"
Then results contain
| ID | osm_type | osm_id |
| 0 | N | 1 |


@@ -0,0 +1,13 @@
@DB
Feature: Searching of simple objects
Testing simple stuff
Scenario: Search for place node
Given the places
| osm | class | type | name+name | geometry |
| N1 | place | village | Foo | 10.0 -10.0 |
When importing
And searching for "Foo"
Then results contain
| ID | osm | class | type | centroid |
| 0 | N1 | place | village | 10 -10 |
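The centroid column of the expected results is matched against the lat/lon fields of the JSON answer by the new SearchResponse.match_row code later in this commit. A minimal sketch of that comparison, with made-up result data:

    def centroid_matches(cell, result, places=7):
        # the table cell holds "lon lat"; compare against the result's lon/lat fields
        x, y = cell.split(' ')
        return (round(float(x) - float(result['lon']), places) == 0 and
                round(float(y) - float(result['lat']), places) == 0)

    print(centroid_matches('10 -10', {'lon': '10.0', 'lat': '-10.0'}))   # True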


@@ -1 +0,0 @@
Feature: Test


@@ -10,7 +10,7 @@ logger = logging.getLogger(__name__)
userconfig = {
'BASEURL' : 'http://localhost/nominatim',
'BUILDDIR' : '../build',
'BUILDDIR' : os.path.join(os.path.split(__file__)[0], "../../build"),
'REMOVE_TEMPLATE' : False,
'KEEP_TEST_DB' : False,
'TEMPLATE_DB' : 'test_template_nominatim',
@@ -140,15 +140,53 @@ class OSMDataFactory(object):
scriptpath = os.path.dirname(os.path.abspath(__file__))
self.scene_path = os.environ.get('SCENE_PATH',
os.path.join(scriptpath, '..', 'scenes', 'data'))
self.scene_cache = {}
def make_geometry(self, geom):
def parse_geometry(self, geom, scene):
if geom[0].find(':') >= 0:
out = self.get_scene_geometry(scene, geom[1:])
if geom.find(',') < 0:
return 'POINT(%s)' % geom
out = 'POINT(%s)' % geom
elif geom.find('(') < 0:
out = 'LINESTRING(%s)' % geom
else:
out = 'POLYGON(%s)' % geom
if geom.find('(') < 0:
return 'LINESTRING(%s)' % geom
# TODO parse precision
return out, 0
return 'POLYGON(%s)' % geom
    def get_scene_geometry(self, default_scene, name):
        geoms = []
        defscene = self.load_scene(default_scene)

        for obj in name.split('+'):
            oname = obj.strip()
            if oname.startswith(':'):
                wkt = defscene[oname[1:]]
            else:
                scene, obj = oname.split(':', 2)
                scene_geoms = self.load_scene(scene)
                wkt = scene_geoms[obj]

            geoms.append("'%s'::geometry" % wkt)

        if len(geoms) == 1:
            return geoms[0]
        else:
            return 'ST_LineMerge(ST_Collect(ARRAY[%s]))' % ','.join(geoms)

    def load_scene(self, name):
        if name in self.scene_cache:
            return self.scene_cache[name]

        scene = {}
        with open(os.path.join(self.scene_path, "%s.wkt" % name), 'r') as fd:
            for line in fd:
                if line.strip():
                    obj, wkt = line.split('|', 2)
                    scene[obj.strip()] = wkt.strip()

        self.scene_cache[name] = scene

        return scene
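For reference, load_scene() above reads plain-text .wkt files with one named geometry per line. A hedged sketch of the expected format, with an invented scene (names and geometries are examples only):

    import io

    # invented example contents of a scene file, e.g. scenes/data/example.wkt
    sample = io.StringIO(
        "building-1 | POLYGON((0 0, 0 1, 1 1, 1 0, 0 0))\n"
        "road-a     | LINESTRING(0 0, 1 1)\n")

    scene = {}
    for line in sample:
        if line.strip():
            obj, wkt = line.split('|', 1)   # "<name> | <WKT>"
            scene[obj.strip()] = wkt.strip()

    print(scene['road-a'])   # LINESTRING(0 0, 1 1)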
def before_all(context):
@@ -169,6 +207,7 @@ def after_all(context):
def before_scenario(context, scenario):
    if 'DB' in context.tags:
        context.nominatim.setup_db(context)
        context.scene = None

def after_scenario(context, scenario):
    if 'DB' in context.tags:


@@ -1,31 +1,108 @@
import base64
import random
import string
import re
from nose.tools import * # for assert functions
import psycopg2.extras
def _format_placex_columns(row, force_name):
out = {
'osm_type' : row['osm'][0],
'osm_id' : row['osm'][1:],
'admin_level' : row.get('admin_level', 100)
}
class PlaceColumn:
for k in ('class', 'type', 'housenumber', 'street',
'addr_place', 'isin', 'postcode', 'country_code'):
if k in row.headings and row[k]:
out[k] = row[k]
def __init__(self, context, force_name):
self.columns = {}
self.force_name = force_name
self.context = context
self.geometry = None
if 'name' in row.headings:
if row['name'].startswith("'"):
out['name'] = eval('{' + row['name'] + '}')
def add(self, key, value):
if hasattr(self, 'set_key_' + key):
getattr(self, 'set_key_' + key)(value)
elif key.startswith('name+'):
self.add_hstore('name', key[5:], value)
elif key.startswith('extra+'):
self.add_hstore('extratags', key[6:], value)
else:
out['name'] = { 'name' : row['name'] }
elif force_name:
out['name'] = { 'name' : ''.join(random.choice(string.printable) for _ in range(int(random.random()*30))) }
assert_in(key, ('class', 'type', 'street', 'addr_place',
'isin', 'postcode'))
self.columns[key] = value
if 'extratags' in row.headings:
out['extratags'] = eval('{%s}' % row['extratags'])
def set_key_name(self, value):
self.add_hstore('name', 'name', value)
return out
    def set_key_osm(self, value):
        assert_in(value[0], 'NRW')
        ok_(value[1:].isdigit())
        self.columns['osm_type'] = value[0]
        self.columns['osm_id'] = int(value[1:])

    def set_key_admin(self, value):
        self.columns['admin_level'] = int(value)

    def set_key_housenr(self, value):
        self.columns['housenumber'] = value

    def set_key_cc(self, value):
        ok_(len(value) == 2)
        self.columns['country_code'] = value

    def set_key_geometry(self, value):
        geom, precision = self.context.osm.parse_geometry(value, self.context.scene)
        assert_is_not_none(geom)
        self.geometry = "ST_SetSRID('%s'::geometry, 4326)" % geom

    def add_hstore(self, column, key, value):
        if column in self.columns:
            self.columns[column][key] = value
        else:
            self.columns[column] = { key : value }

    def db_insert(self, cursor):
        assert_in('osm_type', self.columns)
        if self.force_name and 'name' not in self.columns:
            self.add_hstore('name', 'name', ''.join(random.choice(string.printable)
                                                    for _ in range(int(random.random()*30))))
        if self.columns['osm_type'] == 'N' and self.geometry is None:
            self.geometry = "ST_SetSRID(ST_Point(%f, %f), 4326)" % (
                                random.random()*360 - 180, random.random()*180 - 90)
        query = 'INSERT INTO place (%s, geometry) values(%s, %s)' % (
                    ','.join(self.columns.keys()),
                    ','.join(['%s' for x in range(len(self.columns))]),
                    self.geometry)
        cursor.execute(query, list(self.columns.values()))


class NominatimID:
    """ Splits a unique identifier for places into its components.
        As place_ids cannot be used for testing, we use a unique
        identifier instead that is of the form <osmtype><osmid>[:<class>].
    """

    id_regex = re.compile(r"(?P<tp>[NRW])(?P<id>\d+)(:(?P<cls>\w+))?")

    def __init__(self, oid):
        self.typ = self.oid = self.cls = None

        if oid is not None:
            m = self.id_regex.fullmatch(oid)
            assert_is_not_none(m, "ID '%s' not of form <osmtype><osmid>[:<class>]" % oid)

            self.typ = m.group('tp')
            self.oid = m.group('id')
            self.cls = m.group('cls')

    def table_select(self):
        """ Return where clause and parameter list to select the object
            from a Nominatim table.
        """
        where = 'osm_type = %s and osm_id = %s'
        params = [self.typ, self.oid]

        if self.cls is not None:
            where += ' and class = %s'
            params.append(self.cls)

        return where, params
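A quick illustration (not part of the commit) of the identifiers NominatimID accepts, using the same regular expression as above:

    import re

    id_regex = re.compile(r"(?P<tp>[NRW])(?P<id>\d+)(:(?P<cls>\w+))?")

    for oid in ('N1', 'W12:highway'):
        m = id_regex.fullmatch(oid)
        print(m.group('tp'), m.group('id'), m.group('cls'))
    # N 1 None
    # W 12 highway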
@given("the (?P<named>named )?places")
@@ -33,21 +110,12 @@ def add_data_to_place_table(context, named):
cur = context.db.cursor()
cur.execute('ALTER TABLE place DISABLE TRIGGER place_before_insert')
for r in context.table:
cols = _format_placex_columns(r, named is not None)
col = PlaceColumn(context, named is not None)
if 'geometry' in r.headings:
geometry = "'%s'::geometry" % context.osm.make_geometry(r['geometry'])
elif cols['osm_type'] == 'N':
geometry = "ST_Point(%f, %f)" % (random.random()*360 - 180, random.random()*180 - 90)
else:
raise RuntimeError("Missing geometry for place")
for h in r.headings:
col.add(h, r[h])
query = 'INSERT INTO place (%s, geometry) values(%s, ST_SetSRID(%s, 4326))' % (
','.join(cols.keys()),
','.join(['%s' for x in range(len(cols))]),
geometry
)
cur.execute(query, list(cols.values()))
col.db_insert(cur)
cur.execute('ALTER TABLE place ENABLE TRIGGER place_before_insert')
cur.close()
context.db.commit()
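With the rewritten step above, each table row becomes one PlaceColumn object, and PlaceColumn.db_insert() assembles the INSERT statement itself. A sketch (column values invented) of the query string it builds before psycopg2 substitutes the placeholders:

    columns = {'osm_type': 'N', 'osm_id': 1, 'class': 'place', 'type': 'village'}
    geometry = "ST_SetSRID('POINT(10.0 -10.0)'::geometry, 4326)"

    query = 'INSERT INTO place (%s, geometry) values(%s, %s)' % (
        ','.join(columns.keys()),
        ','.join(['%s' for _ in columns]),
        geometry)
    print(query)
    # -> INSERT INTO place (osm_type,osm_id,class,type, geometry)
    #    values(%s,%s,%s,%s, ST_SetSRID('POINT(10.0 -10.0)'::geometry, 4326))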
@@ -70,6 +138,36 @@ def import_and_index_data_from_place_table(context):
context.nominatim.run_setup_script('index', 'index-noanalyse')
@then("table (?P<table>\w+) contains(?P<exact> exactly)?")
def check_table_contents(context, table, exact):
pass
@then("placex contains(?P<exact> exactly)?")
def check_placex_contents(context, exact):
cur = context.db.cursor(cursor_factory=psycopg2.extras.DictCursor)
if exact:
cur.execute('SELECT osm_type, osm_id, class from placex')
to_match = [(r[0], r[1], r[2]) for r in cur]
for row in context.table:
nid = NominatimID(row['object'])
where, params = nid.table_select()
cur.execute("""SELECT *, ST_AsText(geometry) as geomtxt,
ST_X(centroid) as cx, ST_Y(centroid) as cy
FROM placex where %s""" % where,
params)
for res in cur:
for h in row.headings:
if h == 'object':
pass
elif h.startswith('name'):
name = h[5:] if h.startswith('name+') else 'name'
assert_in(name, res['name'])
eq_(res['name'][name], row[h])
elif h.startswith('extratags+'):
eq_(res['extratags'][h[10:]], row[h])
elif h.startswith('centroid'):
fac = float(h[9:]) if h.startswith('centroid*') else 1.0
x, y = row[h].split(' ')
assert_almost_equal(float(x) * fac, res['cx'])
assert_almost_equal(float(y) * fac, res['cy'])
else:
eq_(row[h], str(res[h]))
context.db.commit()
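A worked example of the centroid*10 convention handled above, using the values from the import feature at the top of this commit (node imported at 10.0 -10.0, expected cell "1 -1"):

    h = 'centroid*10'
    cell = '1 -1'                       # expected value from the test table
    res = {'cx': 10.0, 'cy': -10.0}     # centroid of the imported placex row

    fac = float(h[9:]) if h.startswith('centroid*') else 1.0
    x, y = cell.split(' ')
    assert abs(float(x) * fac - res['cx']) < 1e-7   # 1 * 10 == 10.0
    assert abs(float(y) * fac - res['cy']) < 1e-7   # -1 * 10 == -10.0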


@@ -4,14 +4,47 @@
or via the HTTP interface.
"""
import json
import os
import subprocess
from collections import OrderedDict
from nose.tools import * # for assert functions
class SearchResponse(object):
def __init__(response,
    def __init__(self, page, fmt='json', errorcode=200):
        self.page = page
        self.format = fmt
        self.errorcode = errorcode

        getattr(self, 'parse_' + fmt)()
@when(u'searching for "(?P<query>.*)"( with params)?$')
    def parse_json(self):
        self.result = json.JSONDecoder(object_pairs_hook=OrderedDict).decode(self.page)

    def match_row(self, row):
        if 'ID' in row.headings:
            todo = [int(row['ID'])]
        else:
            todo = range(len(self.result))

        for i in todo:
            res = self.result[i]
            for h in row.headings:
                if h == 'ID':
                    pass
                elif h == 'osm':
                    assert_equal(res['osm_type'], row[h][0])
                    assert_equal(res['osm_id'], row[h][1:])
                elif h == 'centroid':
                    x, y = row[h].split(' ')
                    assert_almost_equal(float(y), float(res['lat']))
                    assert_almost_equal(float(x), float(res['lon']))
                else:
                    assert_in(h, res)
                    assert_equal(str(res[h]), str(row[h]))
@when(u'searching for "(?P<query>.*)"')
def query_cmd(context, query):
    """ Query directly via PHP script.
    """
@@ -28,11 +61,6 @@ def query_cmd(context, query):
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(outp, err) = proc.communicate()
assert_equals (0, proc.returncode), "query.php failed with message: %s" % err
context.
world.page = outp
world.response_format = 'json'
world.request_type = 'search'
world.returncode = 200
assert_equals (0, proc.returncode, "query.php failed with message: %s" % err)
context.response = SearchResponse(outp.decode('utf-8'), 'json')
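SearchResponse above wraps the raw output of query.php. A minimal sketch of what the json path does, with an invented one-result payload:

    import json
    from collections import OrderedDict

    page = '[{"osm_type": "N", "osm_id": "1", "lat": "-10.0", "lon": "10.0"}]'

    # parse_json(): decode the page, keeping the key order of each result
    result = json.JSONDecoder(object_pairs_hook=OrderedDict).decode(page)

    # match_row() then compares table cells against these fields, e.g. for 'osm' = 'N1':
    res = result[0]
    assert res['osm_type'] == 'N1'[0] and res['osm_id'] == 'N1'[1:]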

test/bdd/steps/results.py (new file, 33 lines)

@@ -0,0 +1,33 @@
""" Steps that check results.
"""
from nose.tools import * # for assert functions
def compare(operator, op1, op2):
if operator == 'less than':
return op1 < op2
elif operator == 'more than':
return op1 > op2
elif operator == 'exactly':
return op1 == op2
elif operator == 'at least':
return op1 >= op2
elif operator == 'at most':
return op1 <= op2
else:
raise Exception("unknown operator '%s'" % operator)
@step(u'(?P<operator>less than|more than|exactly|at least|at most) (?P<number>\d+) results? (?:is|are) returned')
def validate_result_number(context, operator, number):
numres = len(context.response.result)
ok_(compare(operator, numres, int(number)),
"Bad number of results: expected %s %s, got %d." % (operator, number, numres))
@then(u'results contain')
def step_impl(context):
context.execute_steps("then at least 1 result is returned")
for line in context.table:
context.response.match_row(line)
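Usage note: assuming the compare() helper above, a step such as "then at least 1 result is returned" reduces to a single comparison against the parsed response, e.g.:

    results = [{'osm_id': '1'}]              # hypothetical parsed JSON results
    assert compare('at least', len(results), 1)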