Mirror of https://github.com/osm-search/Nominatim.git, synced 2024-12-25 22:12:45 +03:00
Fixed linting, refactored threaded SQL handling, and removed the importTigerData() function
This commit is contained in:
parent 4080fbb95c
commit 64128b699a
@@ -67,76 +67,6 @@ class SetupFunctions
         }
     }

-    public function importTigerData($sTigerPath)
-    {
-        info('Import Tiger data');
-
-        $aFilenames = glob($sTigerPath.'/*.sql');
-        info('Found '.count($aFilenames).' SQL files in path '.$sTigerPath);
-        if (empty($aFilenames)) {
-            warn('Tiger data import selected but no files found in path '.$sTigerPath);
-            return;
-        }
-        $sTemplate = file_get_contents(CONST_SqlDir.'/tiger_import_start.sql');
-        $sTemplate = $this->replaceSqlPatterns($sTemplate);
-
-        $this->pgsqlRunScript($sTemplate, false);
-
-        $aDBInstances = array();
-        for ($i = 0; $i < $this->iInstances; $i++) {
-            // https://secure.php.net/manual/en/function.pg-connect.php
-            $DSN = getSetting('DATABASE_DSN');
-            $DSN = preg_replace('/^pgsql:/', '', $DSN);
-            $DSN = preg_replace('/;/', ' ', $DSN);
-            $aDBInstances[$i] = pg_connect($DSN, PGSQL_CONNECT_FORCE_NEW | PGSQL_CONNECT_ASYNC);
-            pg_ping($aDBInstances[$i]);
-        }
-
-        foreach ($aFilenames as $sFile) {
-            echo $sFile.': ';
-            $hFile = fopen($sFile, 'r');
-            $sSQL = fgets($hFile, 100000);
-            $iLines = 0;
-            while (true) {
-                for ($i = 0; $i < $this->iInstances; $i++) {
-                    if (!pg_connection_busy($aDBInstances[$i])) {
-                        while (pg_get_result($aDBInstances[$i]));
-                        $sSQL = fgets($hFile, 100000);
-                        if (!$sSQL) break 2;
-                        if (!pg_send_query($aDBInstances[$i], $sSQL)) fail(pg_last_error($aDBInstances[$i]));
-                        $iLines++;
-                        if ($iLines == 1000) {
-                            echo '.';
-                            $iLines = 0;
-                        }
-                    }
-                }
-                usleep(10);
-            }
-            fclose($hFile);
-
-            $bAnyBusy = true;
-            while ($bAnyBusy) {
-                $bAnyBusy = false;
-                for ($i = 0; $i < $this->iInstances; $i++) {
-                    if (pg_connection_busy($aDBInstances[$i])) $bAnyBusy = true;
-                }
-                usleep(10);
-            }
-            echo "\n";
-        }
-
-        for ($i = 0; $i < $this->iInstances; $i++) {
-            pg_close($aDBInstances[$i]);
-        }
-
-        info('Creating indexes on Tiger data');
-        $sTemplate = file_get_contents(CONST_SqlDir.'/tiger_import_finish.sql');
-        $sTemplate = $this->replaceSqlPatterns($sTemplate);
-
-        $this->pgsqlRunScript($sTemplate, false);
-    }
-
     public function calculatePostcodes($bCMDResultAll)
     {
         info('Calculate Postcodes');
@@ -1,5 +1,5 @@
 """
-Functions for setting up and importing a new Nominatim database.
+Functions for importing tiger data and handling tarball and directory files.
 """
 import logging
 import os
@@ -10,32 +10,68 @@ from ..db.connection import connect
 from ..db.async_connection import DBConnection
 from ..db.sql_preprocessor import SQLPreprocessor

-# pylint: disable=R0912
-# pylint: disable=R0914,R0915,W0702

 LOG = logging.getLogger()


-def add_tiger_data(dsn, data_dir, threads, config, sqllib_dir):
-    """ Import tiger data from directory or tar file
+def handle_tarfile_or_directory(data_dir):
+    """ Handles tarfile or directory for importing tiger data
     """
-    # Handling directory or tarball file.
-    is_tarfile = False
-
+    tar = None
     if data_dir.endswith('.tar.gz'):
-        is_tarfile = True
         tar = tarfile.open(data_dir)
         sql_files = [i for i in tar.getmembers() if i.name.endswith('.sql')]
         LOG.warning("Found %d SQL files in tarfile with path %s", len(sql_files), data_dir)
         if not sql_files:
             LOG.warning("Tiger data import selected but no files in tarfile's path %s", data_dir)
-            return
+            return None, None
     else:
         files = os.listdir(data_dir)
-        sql_files = [i for i in files if i.endswith('.sql')]
+        sql_files = [os.path.join(data_dir, i) for i in files if i.endswith('.sql')]
         LOG.warning("Found %d SQL files in path %s", len(sql_files), data_dir)
         if not sql_files:
             LOG.warning("Tiger data import selected but no files found in path %s", data_dir)
-            return
+            return None, None
+
+    return sql_files, tar
+
+
+def handle_threaded_sql_statements(sel, file):
+    """ Handles sql statement with multiplexing
+    """
+
+    lines = 0
+    end_of_file = False
+    # Using pool of database connections to execute sql statements
+    while True:
+        if end_of_file:
+            break
+        for key, _ in sel.select(1):
+            conn = key.data
+            try:
+                if conn.is_done():
+                    sql_query = file.readline()
+                    lines += 1
+                    if not sql_query:
+                        end_of_file = True
+                        break
+                    conn.perform(sql_query)
+                    if lines == 1000:
+                        print('. ', end='', flush=True)
+                        lines = 0
+            except Exception as exc:  # pylint: disable=broad-except
+                LOG.error('Wrong SQL statement: %s', exc)
+
+
+def add_tiger_data(dsn, data_dir, threads, config, sqllib_dir):
+    """ Import tiger data from directory or tar file
+    """
+
+    sql_files, tar = handle_tarfile_or_directory(data_dir)
+
+    if not sql_files:
+        return
+
     with connect(dsn) as conn:
         sql = SQLPreprocessor(conn, config, sqllib_dir)
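A hedged usage sketch for the new helper (the paths below are invented): handle_tarfile_or_directory() returns joined file paths and no tar handle for a plain directory, tarfile members plus the open archive for a name ending in '.tar.gz', and (None, None) when no .sql files turn up.

# Plain directory: paths are joined with the directory name.
sql_files, tar = handle_tarfile_or_directory('tiger')
# -> ['tiger/01001.sql', 'tiger/01003.sql', ...], None

# Gzipped tarball (only names ending in '.tar.gz' are treated as archives):
sql_files, tar = handle_tarfile_or_directory('tiger.tar.gz')
# -> [<TarInfo '01001.sql'>, ...], <open tarfile.TarFile>

# No .sql files found in either form:
sql_files, tar = handle_tarfile_or_directory('empty-dir')
# -> None, None

The refactored add_tiger_data() treats the (None, None) case as "nothing to import" and returns early, which replaces the bare return of the old monolithic function.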
@@ -45,54 +81,34 @@ def add_tiger_data(dsn, data_dir, threads, config, sqllib_dir):
     # sql_query in <threads - 1> chunks.
     sel = selectors.DefaultSelector()
     place_threads = max(1, threads - 1)

+    # Creates a pool of database connections
+    for _ in range(place_threads):
+        conn = DBConnection(dsn)
+        conn.connect()
+        sel.register(conn, selectors.EVENT_WRITE, conn)
+
     for sql_file in sql_files:
-        if not is_tarfile:
-            file_path = os.path.join(data_dir, sql_file)
-            file = open(file_path)
+        if not tar:
+            file = open(sql_file)
         else:
             file = tar.extractfile(sql_file)
-        lines = 0
-        end_of_file = False
-        total_used_threads = place_threads
-        while True:
-            if end_of_file:
-                break
-            for imod in range(place_threads):
-                conn = DBConnection(dsn)
-                conn.connect()

-                sql_query = file.readline()
-                lines += 1
+        handle_threaded_sql_statements(sel, file)

-                if not sql_query:
-                    end_of_file = True
-                    total_used_threads = imod
-                    break
+    # Unregistering pool of database connections
+    while place_threads > 0:
+        for key, _ in sel.select(1):
+            conn = key.data
+            sel.unregister(conn)
+            conn.wait()
+            conn.close()
+            place_threads -= 1

-                conn.perform(sql_query)
-                sel.register(conn, selectors.EVENT_READ, conn)
-
-                if lines == 1000:
-                    print('. ', end='', flush=True)
-                    lines = 0
-
-        todo = min(place_threads, total_used_threads)
-        while todo > 0:
-            for key, _ in sel.select(1):
-                try:
-                    conn = key.data
-                    sel.unregister(conn)
-                    conn.wait()
-                    conn.close()
-                    todo -= 1
-                except:
-                    todo -= 1
-
-    if is_tarfile:
+    if tar:
         tar.close()
     print('\n')
     LOG.warning("Creating indexes on Tiger data")
     with connect(dsn) as conn:
         sql = SQLPreprocessor(conn, config, sqllib_dir)
         sql.run_sql_file(conn, 'tiger_import_finish.sql')
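The refactored loop relies on a small surface of the DBConnection class from ..db.async_connection: connect(), is_done(), perform(), wait(), close(), plus the fileno() that selectors.register() needs implicitly. A minimal stand-in built on psycopg2's asynchronous mode could look like the sketch below; only the method names come from the diff, while the class name and implementation are assumptions, not Nominatim's actual code.

import psycopg2
from psycopg2 import extensions

class MiniAsyncConnection:
    """ Hypothetical stand-in for DBConnection (assumed interface). """

    def __init__(self, dsn):
        self.dsn = dsn
        self.conn = None

    def connect(self):
        self.conn = psycopg2.connect(self.dsn, async_=1)
        self.wait()                       # finish the async handshake

    def fileno(self):
        # selectors.register() accepts any object with a fileno();
        # expose the underlying socket's descriptor.
        return self.conn.fileno()

    def is_done(self):
        # POLL_OK means the previous statement has been fully consumed.
        return self.conn.poll() == extensions.POLL_OK

    def perform(self, sql):
        self.conn.cursor().execute(sql)   # non-blocking in async mode

    def wait(self):
        while self.conn.poll() != extensions.POLL_OK:
            pass                          # real code would select() on fileno()

    def close(self):
        self.conn.close()

With that interface the pieces fit together: the pool is registered once up front for EVENT_WRITE, handle_threaded_sql_statements() uses sel.select(1) to wake on ready sockets and is_done() to skip connections still executing, and the final while place_threads > 0 loop waits for, unregisters, and closes each connection exactly once.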