mirror of
https://github.com/searxng/searxng
synced 2024-01-01 19:24:07 +01:00
data: osm_keys_tags: use SQLite instead of JSON
This commit is contained in:
parent
71f1789be0
commit
c0a96d8ef8
8 changed files with 72424 additions and 56765 deletions
|
@ -11,12 +11,12 @@ __all__ = [
|
||||||
'EXTERNAL_URLS',
|
'EXTERNAL_URLS',
|
||||||
'WIKIDATA_UNITS',
|
'WIKIDATA_UNITS',
|
||||||
'EXTERNAL_BANGS',
|
'EXTERNAL_BANGS',
|
||||||
'OSM_KEYS_TAGS',
|
|
||||||
'LOCALES',
|
'LOCALES',
|
||||||
'ahmia_blacklist_loader',
|
'ahmia_blacklist_loader',
|
||||||
'fetch_engine_descriptions',
|
'fetch_engine_descriptions',
|
||||||
'fetch_iso4217_from_user',
|
'fetch_iso4217_from_user',
|
||||||
'fetch_name_from_iso4217',
|
'fetch_name_from_iso4217',
|
||||||
|
'fetch_osm_key_label',
|
||||||
]
|
]
|
||||||
|
|
||||||
import re
|
import re
|
||||||
|
@ -109,6 +109,45 @@ def fetch_name_from_iso4217(iso4217: str, language: str) -> Optional[str]:
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
@lru_cache(100)
def fetch_osm_key_label(key_name: str, language: str) -> Optional[str]:
    """Return a human readable label for the OSM key *key_name* in *language*.

    ``currency:XXX`` keys are resolved through the ISO-4217 currency names;
    every other key is looked up in the ``osm_keys`` table of
    ``osm_keys_tags.db``.  The lookup falls back from the full locale
    (``zh-hk``) to its short form (``zh``) and finally to English; ``None``
    is returned when no label is known.
    """
    if key_name.startswith('currency:'):
        # currency:EUR --> get the name from the CURRENCIES variable
        # see https://wiki.openstreetmap.org/wiki/Key%3Acurrency
        # and for example https://taginfo.openstreetmap.org/keys/currency:EUR#values
        # but there is also currency=EUR (currently not handled)
        # https://taginfo.openstreetmap.org/keys/currency#values
        parts = key_name.split(':')
        if len(parts) > 1:
            currency_label = fetch_name_from_iso4217(parts[1], language)
            # fall back to the raw ISO code when no translated name exists
            return currency_label if currency_label else parts[1]

    language = language.lower()
    language_short = language.split('-')[0]
    with sql_connection("osm_keys_tags.db") as conn:
        rows = conn.execute(
            "SELECT language, label FROM osm_keys WHERE name=? AND language in (?, ?, 'en')",
            (key_name, language, language_short),
        ).fetchall()
    labels = dict(rows)
    return labels.get(language) or labels.get(language_short) or labels.get('en')
|
||||||
|
|
||||||
|
|
||||||
|
@lru_cache(100)
def fetch_osm_tag_label(tag_key: str, tag_value: str, language: str) -> Optional[str]:
    """Return a human readable label for the OSM tag ``tag_key=tag_value``.

    Labels are read from the ``osm_tags`` table of ``osm_keys_tags.db``,
    falling back from the full locale to its short form and finally to
    English; ``None`` is returned when no label is known.
    """
    language = language.lower()
    language_short = language.split('-')[0]
    with sql_connection("osm_keys_tags.db") as conn:
        rows = conn.execute(
            "SELECT language, label FROM osm_tags WHERE tag_key=? AND tag_value=? AND language in (?, ?, 'en')",
            (tag_key, tag_value, language, language_short),
        ).fetchall()
    labels = dict(rows)
    return labels.get(language) or labels.get(language_short) or labels.get('en')
|
||||||
|
|
||||||
|
|
||||||
def ahmia_blacklist_loader():
|
def ahmia_blacklist_loader():
|
||||||
"""Load data from `ahmia_blacklist.txt` and return a list of MD5 values of onion
|
"""Load data from `ahmia_blacklist.txt` and return a list of MD5 values of onion
|
||||||
names. The MD5 values are fetched by::
|
names. The MD5 values are fetched by::
|
||||||
|
@ -126,6 +165,5 @@ USER_AGENTS = _load('useragents.json')
|
||||||
EXTERNAL_URLS = _load('external_urls.json')
|
EXTERNAL_URLS = _load('external_urls.json')
|
||||||
WIKIDATA_UNITS = _load('wikidata_units.json')
|
WIKIDATA_UNITS = _load('wikidata_units.json')
|
||||||
EXTERNAL_BANGS = _load('external_bangs.json')
|
EXTERNAL_BANGS = _load('external_bangs.json')
|
||||||
OSM_KEYS_TAGS = _load('osm_keys_tags.json')
|
|
||||||
ENGINE_TRAITS = _load('engine_traits.json')
|
ENGINE_TRAITS = _load('engine_traits.json')
|
||||||
LOCALES = _load('locales.json')
|
LOCALES = _load('locales.json')
|
||||||
|
|
1912
searx/data/dumps/osm_keys.csv
Normal file
1912
searx/data/dumps/osm_keys.csv
Normal file
File diff suppressed because it is too large
Load diff
70384
searx/data/dumps/osm_tags.csv
Normal file
70384
searx/data/dumps/osm_tags.csv
Normal file
File diff suppressed because it is too large
Load diff
BIN
searx/data/osm_keys_tags.db
Normal file
BIN
searx/data/osm_keys_tags.db
Normal file
Binary file not shown.
File diff suppressed because it is too large
Load diff
|
@ -6,7 +6,7 @@ from time import time
|
||||||
from urllib.parse import urlencode
|
from urllib.parse import urlencode
|
||||||
|
|
||||||
from searx.network import get as http_get
|
from searx.network import get as http_get
|
||||||
from searx.engines.openstreetmap import get_key_label
|
from searx.data import fetch_osm_key_label
|
||||||
|
|
||||||
about = {
|
about = {
|
||||||
"website": 'https://www.apple.com/maps/',
|
"website": 'https://www.apple.com/maps/',
|
||||||
|
@ -72,7 +72,7 @@ def response(resp):
|
||||||
telephone = result['telephone']
|
telephone = result['telephone']
|
||||||
links.append(
|
links.append(
|
||||||
{
|
{
|
||||||
'label': get_key_label('phone', user_language),
|
'label': fetch_osm_key_label('phone', user_language),
|
||||||
'url': 'tel:' + telephone,
|
'url': 'tel:' + telephone,
|
||||||
'url_label': telephone,
|
'url_label': telephone,
|
||||||
}
|
}
|
||||||
|
@ -81,7 +81,7 @@ def response(resp):
|
||||||
url = result['urls'][0]
|
url = result['urls'][0]
|
||||||
links.append(
|
links.append(
|
||||||
{
|
{
|
||||||
'label': get_key_label('website', user_language),
|
'label': fetch_osm_key_label('website', user_language),
|
||||||
'url': url,
|
'url': url,
|
||||||
'url_label': url,
|
'url_label': url,
|
||||||
}
|
}
|
||||||
|
|
|
@ -10,7 +10,7 @@ from functools import partial
|
||||||
|
|
||||||
from flask_babel import gettext
|
from flask_babel import gettext
|
||||||
|
|
||||||
from searx.data import OSM_KEYS_TAGS, fetch_name_from_iso4217
|
from searx.data import fetch_osm_tag_label, fetch_osm_key_label
|
||||||
from searx.utils import searx_useragent
|
from searx.utils import searx_useragent
|
||||||
from searx.external_urls import get_external_url
|
from searx.external_urls import get_external_url
|
||||||
from searx.engines.wikidata import send_wikidata_query, sparql_string_escape, get_thumbnail
|
from searx.engines.wikidata import send_wikidata_query, sparql_string_escape, get_thumbnail
|
||||||
|
@ -187,14 +187,14 @@ def response(resp):
|
||||||
'template': 'map.html',
|
'template': 'map.html',
|
||||||
'title': title,
|
'title': title,
|
||||||
'address': address,
|
'address': address,
|
||||||
'address_label': get_key_label('addr', user_language),
|
'address_label': fetch_osm_key_label('addr', user_language),
|
||||||
'url': url,
|
'url': url,
|
||||||
'osm': osm,
|
'osm': osm,
|
||||||
'geojson': geojson,
|
'geojson': geojson,
|
||||||
'thumbnail': thumbnail,
|
'thumbnail': thumbnail,
|
||||||
'links': links,
|
'links': links,
|
||||||
'data': data,
|
'data': data,
|
||||||
'type': get_tag_label(result.get('category'), result.get('type', ''), user_language),
|
'type': fetch_osm_tag_label(result.get('category'), result.get('type', ''), user_language),
|
||||||
'type_icon': result.get('icon'),
|
'type_icon': result.get('icon'),
|
||||||
'content': '',
|
'content': '',
|
||||||
'longitude': result['lon'],
|
'longitude': result['lon'],
|
||||||
|
@ -367,7 +367,7 @@ def get_links(result, user_language):
|
||||||
url_label = result.get('wikidata', {}).get('itemLabel') or url_label
|
url_label = result.get('wikidata', {}).get('itemLabel') or url_label
|
||||||
links.append(
|
links.append(
|
||||||
{
|
{
|
||||||
'label': get_key_label(k, user_language),
|
'label': fetch_osm_key_label(k, user_language),
|
||||||
'url': url,
|
'url': url,
|
||||||
'url_label': url_label,
|
'url_label': url_label,
|
||||||
}
|
}
|
||||||
|
@ -389,7 +389,7 @@ def get_data(result, user_language, ignore_keys):
|
||||||
continue
|
continue
|
||||||
if get_key_rank(k) is None:
|
if get_key_rank(k) is None:
|
||||||
continue
|
continue
|
||||||
k_label = get_key_label(k, user_language)
|
k_label = fetch_osm_key_label(k, user_language)
|
||||||
if k_label:
|
if k_label:
|
||||||
data.append(
|
data.append(
|
||||||
{
|
{
|
||||||
|
@ -412,51 +412,3 @@ def get_key_rank(k):
|
||||||
# "payment:*" in KEY_ORDER matches "payment:cash", "payment:debit card", etc...
|
# "payment:*" in KEY_ORDER matches "payment:cash", "payment:debit card", etc...
|
||||||
key_rank = KEY_RANKS.get(k.split(':')[0] + ':*')
|
key_rank = KEY_RANKS.get(k.split(':')[0] + ':*')
|
||||||
return key_rank
|
return key_rank
|
||||||
|
|
||||||
|
|
||||||
def get_label(labels, lang):
|
|
||||||
"""Get label from labels in OSM_KEYS_TAGS
|
|
||||||
|
|
||||||
in OSM_KEYS_TAGS, labels have key == '*'
|
|
||||||
"""
|
|
||||||
tag_label = labels.get(lang.lower())
|
|
||||||
if tag_label is None:
|
|
||||||
# example: if 'zh-hk' is not found, check 'zh'
|
|
||||||
tag_label = labels.get(lang.split('-')[0])
|
|
||||||
if tag_label is None and lang != 'en':
|
|
||||||
# example: if 'zh' is not found, check 'en'
|
|
||||||
tag_label = labels.get('en')
|
|
||||||
if tag_label is None and len(labels.values()) > 0:
|
|
||||||
# example: if still not found, use the first entry
|
|
||||||
tag_label = labels.values()[0]
|
|
||||||
return tag_label
|
|
||||||
|
|
||||||
|
|
||||||
def get_tag_label(tag_category, tag_name, lang):
|
|
||||||
"""Get tag label from OSM_KEYS_TAGS"""
|
|
||||||
tag_name = '' if tag_name is None else tag_name
|
|
||||||
tag_labels = OSM_KEYS_TAGS['tags'].get(tag_category, {}).get(tag_name, {})
|
|
||||||
return get_label(tag_labels, lang)
|
|
||||||
|
|
||||||
|
|
||||||
def get_key_label(key_name, lang):
|
|
||||||
"""Get key label from OSM_KEYS_TAGS"""
|
|
||||||
if key_name.startswith('currency:'):
|
|
||||||
# currency:EUR --> get the name from the CURRENCIES variable
|
|
||||||
# see https://wiki.openstreetmap.org/wiki/Key%3Acurrency
|
|
||||||
# and for example https://taginfo.openstreetmap.org/keys/currency:EUR#values
|
|
||||||
# but there is also currency=EUR (currently not handled)
|
|
||||||
# https://taginfo.openstreetmap.org/keys/currency#values
|
|
||||||
currency = key_name.split(':')
|
|
||||||
if len(currency) > 1:
|
|
||||||
label = fetch_name_from_iso4217(currency[1], lang)
|
|
||||||
if label:
|
|
||||||
return label
|
|
||||||
return currency[1]
|
|
||||||
|
|
||||||
labels = OSM_KEYS_TAGS['keys']
|
|
||||||
for k in key_name.split(':') + ['*']:
|
|
||||||
labels = labels.get(k)
|
|
||||||
if labels is None:
|
|
||||||
return None
|
|
||||||
return get_label(labels, lang)
|
|
||||||
|
|
|
@ -42,8 +42,9 @@ Output file: :origin:`searx/data/osm_keys_tags` (:origin:`CI Update data ...
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import json
|
import csv
|
||||||
import collections
|
import sqlite3
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
from searx.network import set_timeout_for_thread
|
from searx.network import set_timeout_for_thread
|
||||||
from searx.engines import wikidata, set_loggers
|
from searx.engines import wikidata, set_loggers
|
||||||
|
@ -51,7 +52,9 @@ from searx.sxng_locales import sxng_locales
|
||||||
from searx.engines.openstreetmap import get_key_rank, VALUE_TO_LINK
|
from searx.engines.openstreetmap import get_key_rank, VALUE_TO_LINK
|
||||||
from searx.data import data_dir
|
from searx.data import data_dir
|
||||||
|
|
||||||
DATA_FILE = data_dir / 'osm_keys_tags.json'
|
DATABASE_FILE = data_dir / 'osm_keys_tags.db'
|
||||||
|
CSV_KEYS_FILE = data_dir / 'dumps' / 'osm_keys.csv'
|
||||||
|
CSV_TAGS_FILE = data_dir / 'dumps' / 'osm_tags.csv'
|
||||||
|
|
||||||
set_loggers(wikidata, 'wikidata')
|
set_loggers(wikidata, 'wikidata')
|
||||||
|
|
||||||
|
@ -78,42 +81,39 @@ ORDER BY ?key ?item ?itemLabel
|
||||||
|
|
||||||
LANGUAGES = [l[0].lower() for l in sxng_locales]
|
LANGUAGES = [l[0].lower() for l in sxng_locales]
|
||||||
|
|
||||||
PRESET_KEYS = {
|
PRESET_KEYS = [
|
||||||
('wikidata',): {'en': 'Wikidata'},
|
["wikidata", "en", "Wikidata"],
|
||||||
('wikipedia',): {'en': 'Wikipedia'},
|
["wikipedia", "en", "Wikipedia"],
|
||||||
('email',): {'en': 'Email'},
|
["email", "en", "email"],
|
||||||
('facebook',): {'en': 'Facebook'},
|
["facebook", "en", "facebook"],
|
||||||
('fax',): {'en': 'Fax'},
|
["fax", "en", "Fax"],
|
||||||
('internet_access', 'ssid'): {'en': 'Wi-Fi'},
|
["internet_access:ssid", "en", "Wi-Fi"],
|
||||||
}
|
]
|
||||||
|
|
||||||
INCLUDED_KEYS = {('addr',)}
|
INCLUDED_KEYS = {('addr',)}
|
||||||
|
|
||||||
|
|
||||||
def get_preset_keys():
|
|
||||||
results = collections.OrderedDict()
|
|
||||||
for keys, value in PRESET_KEYS.items():
|
|
||||||
r = results
|
|
||||||
for k in keys:
|
|
||||||
r = r.setdefault(k, {})
|
|
||||||
r.setdefault('*', value)
|
|
||||||
return results
|
|
||||||
|
|
||||||
|
|
||||||
def get_keys():
|
def get_keys():
|
||||||
results = get_preset_keys()
|
result_keys = set()
|
||||||
|
results = PRESET_KEYS.copy()
|
||||||
response = wikidata.send_wikidata_query(SPARQL_KEYS_REQUEST)
|
response = wikidata.send_wikidata_query(SPARQL_KEYS_REQUEST)
|
||||||
|
|
||||||
for key in response['results']['bindings']:
|
for key in response['results']['bindings']:
|
||||||
keys = key['key']['value'].split(':')[1:]
|
keys = key['key']['value'].split(':')[1:]
|
||||||
|
label = key['itemLabel']['value'].lower()
|
||||||
|
lang = key['itemLabel']['xml:lang']
|
||||||
|
|
||||||
|
if lang not in LANGUAGES:
|
||||||
|
continue
|
||||||
|
|
||||||
if keys[0] == 'currency' and len(keys) > 1:
|
if keys[0] == 'currency' and len(keys) > 1:
|
||||||
# special case in openstreetmap.py
|
# special case in openstreetmap.py
|
||||||
continue
|
continue
|
||||||
if keys[0] == 'contact' and len(keys) > 1:
|
if keys[0] == 'contact' and len(keys) > 1:
|
||||||
# label for the key "contact.email" is "Email"
|
if lang == "en":
|
||||||
# whatever the language
|
# label for the key "contact.email" is "Email"
|
||||||
r = results.setdefault('contact', {})
|
# whatever the language
|
||||||
r[keys[1]] = {'*': {'en': keys[1]}}
|
results.append((":".join(keys), "en", keys[1]))
|
||||||
continue
|
continue
|
||||||
if tuple(keys) in PRESET_KEYS:
|
if tuple(keys) in PRESET_KEYS:
|
||||||
# skip presets (already set above)
|
# skip presets (already set above)
|
||||||
|
@ -125,40 +125,46 @@ def get_keys():
|
||||||
):
|
):
|
||||||
# keep only keys that will be displayed by openstreetmap.py
|
# keep only keys that will be displayed by openstreetmap.py
|
||||||
continue
|
continue
|
||||||
label = key['itemLabel']['value'].lower()
|
|
||||||
lang = key['itemLabel']['xml:lang']
|
entry = (":".join(keys), lang, label)
|
||||||
r = results
|
entry_key = (entry[0], entry[1])
|
||||||
for k in keys:
|
if entry_key not in result_keys:
|
||||||
r = r.setdefault(k, {})
|
results.append(entry)
|
||||||
r = r.setdefault('*', {})
|
result_keys.add(entry_key)
|
||||||
if lang in LANGUAGES:
|
|
||||||
r.setdefault(lang, label)
|
|
||||||
|
|
||||||
# special cases
|
# special cases
|
||||||
results['delivery']['covid19']['*'].clear()
|
results = [entry for entry in results if entry[0] != 'delivery:covid19']
|
||||||
for k, v in results['delivery']['*'].items():
|
results.extend(
|
||||||
results['delivery']['covid19']['*'][k] = v + ' (COVID19)'
|
[['delivery:covid19', entry[1], entry[2] + ' (COVID19)'] for entry in results if entry[0] == 'delivery']
|
||||||
|
)
|
||||||
|
|
||||||
results['opening_hours']['covid19']['*'].clear()
|
results = [entry for entry in results if entry[0] != 'opening_hours:covid19']
|
||||||
for k, v in results['opening_hours']['*'].items():
|
results.extend(
|
||||||
results['opening_hours']['covid19']['*'][k] = v + ' (COVID19)'
|
[
|
||||||
|
['opening_hours:covid19', entry[1], entry[2] + ' (COVID19)']
|
||||||
|
for entry in results
|
||||||
|
if entry[0] == 'opening_hours'
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
|
|
||||||
def get_tags():
    """Fetch the OSM tag labels from Wikidata.

    Returns a list of ``(tag_key, tag_value, language, label)`` tuples,
    restricted to the languages in ``LANGUAGES``.  Malformed tag values
    (anything that does not split into exactly ``key=value``) are reported
    and skipped.
    """
    results = []
    response = wikidata.send_wikidata_query(SPARQL_TAGS_REQUEST)
    for tag in response['results']['bindings']:
        raw_value = tag['tag']['value']
        try:
            tag_key, tag_value = raw_value.split('=')
            # strip the wiki-page namespace prefix ("Tag:amenity" --> "amenity")
            if tag_key.startswith("Tag:"):
                tag_key = tag_key[4:]
        except ValueError:
            print("ignore tag", raw_value)
            continue
        item_label = tag['itemLabel']
        lang = item_label['xml:lang']
        if lang in LANGUAGES:
            results.append((tag_key, tag_value, lang, item_label['value'].lower()))
    return results
|
||||||
|
|
||||||
|
|
||||||
|
@ -206,9 +212,30 @@ def optimize_keys(data):
|
||||||
if __name__ == '__main__':
    set_timeout_for_thread(60)

    osm_keys = get_keys()
    osm_tags = get_tags()

    # deterministic order keeps the database and the CSV dumps reproducible
    osm_keys.sort(key=lambda item: (item[0], item[1]))
    osm_tags.sort(key=lambda item: (item[0], item[1]))

    # rebuild the SQLite database from scratch
    Path(DATABASE_FILE).unlink(missing_ok=True)
    with sqlite3.connect(DATABASE_FILE) as con:
        cur = con.cursor()
        cur.execute("CREATE TABLE osm_keys(name, language, label)")
        cur.executemany("INSERT INTO osm_keys VALUES(?, ?, ?)", osm_keys)
        # indexed columns are identifiers, so use double quotes -- a
        # single-quoted string here is only accepted through a SQLite
        # compatibility quirk
        cur.execute('CREATE INDEX index_osm_keys ON osm_keys("name", "language")')
        cur.execute("CREATE TABLE osm_tags(tag_key, tag_value, language, label)")
        cur.executemany("INSERT INTO osm_tags VALUES(?, ?, ?, ?)", osm_tags)
        cur.execute('CREATE INDEX index_osm_tags ON osm_tags("tag_key", "tag_value", "language")')
        con.commit()

    # dump both tables as CSV so the data stays reviewable in the repository;
    # newline='' is required by the csv module to avoid doubled line endings
    # on platforms with CRLF translation
    with CSV_KEYS_FILE.open('w', encoding="utf8", newline='') as f:
        w = csv.writer(f, quoting=csv.QUOTE_NONNUMERIC)
        w.writerow(["name", "language", "label"])
        w.writerows(osm_keys)

    with CSV_TAGS_FILE.open('w', encoding="utf8", newline='') as f:
        w = csv.writer(f, quoting=csv.QUOTE_NONNUMERIC)
        w.writerow(["tag_key", "tag_value", "language", "label"])
        w.writerows(osm_tags)
|
||||||
|
|
Loading…
Add table
Reference in a new issue