[fix] openstreetmap engine
It seems there is an API change: extratags can be either a dictionary or None. This commit avoids a crash when extratags is None. Test query: "!osm gare du nord"
parent 0647f83a3e
commit e16c007c22
1 changed file with 38 additions and 26 deletions
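For context, a minimal self-contained sketch (hypothetical sample data, not part of the commit) of the failure mode the message describes: dict.get(key, default) only falls back to the default when the key is missing, so an explicit "extratags": null from Nominatim still reaches the code as None and crashes on the next .get call.

    # Hypothetical Nominatim result: the key exists, but its value is None.
    result = {"extratags": None}

    # Pre-fix pattern: the {} default is ignored because the key is present.
    try:
        result.get("extratags", {}).get("image")
    except AttributeError as exc:
        print("crash:", exc)  # 'NoneType' object has no attribute 'get'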
@@ -162,8 +162,15 @@ def response(resp):
             }
         )
 
+    # simplify the code below: make sure extratags is a dictionnary
+    for result in nominatim_json:
+        if not isinstance(result.get('extratags'), dict):
+            result["extratags"] = {}
+
+    # fetch data from wikidata
     fetch_wikidata(nominatim_json, user_language)
 
+    # create results
     for result in nominatim_json:
         title, address = get_title_address(result)
 
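A small illustration (hypothetical inputs, not from the commit) of how the isinstance check added above normalizes all three shapes a result can arrive in: missing key, explicit None, or an actual dict.

    # Hypothetical results covering the three cases the normalization handles.
    nominatim_json = [
        {},                                      # 'extratags' missing
        {"extratags": None},                     # 'extratags': null in the JSON
        {"extratags": {"wikidata": "Q123456"}},  # already a dict: left untouched
    ]

    for result in nominatim_json:
        if not isinstance(result.get("extratags"), dict):
            result["extratags"] = {}

    # Every result now carries a dict, so downstream code can index it directly.
    print([result["extratags"] for result in nominatim_json])
    # [{}, {}, {'wikidata': 'Q123456'}]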
@@ -218,13 +225,12 @@ def fetch_wikidata(nominatim_json, user_language):
     wikidata_ids = []
     wd_to_results = {}
     for result in nominatim_json:
-        e = result.get("extratags")
-        if e:
-            # ignore brand:wikidata
-            wd_id = e.get("wikidata", e.get("wikidata link"))
-            if wd_id and wd_id not in wikidata_ids:
-                wikidata_ids.append("wd:" + wd_id)
-                wd_to_results.setdefault(wd_id, []).append(result)
+        extratags = result['extratags']
+        # ignore brand:wikidata
+        wd_id = extratags.get('wikidata', extratags.get('wikidata link'))
+        if wd_id and wd_id not in wikidata_ids:
+            wikidata_ids.append('wd:' + wd_id)
+            wd_to_results.setdefault(wd_id, []).append(result)
 
     if wikidata_ids:
         user_language = 'en' if user_language == 'all' else user_language.split('-')[0]
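As a side note on the block above, the nested dict.get calls implement a simple preference order; a small illustration with made-up tags (not from the commit):

    # Made-up tags: 'wikidata' is preferred, 'wikidata link' is the fallback.
    extratags = {"wikidata link": "Q111"}
    print(extratags.get("wikidata", extratags.get("wikidata link")))  # Q111

    extratags = {"wikidata": "Q222", "wikidata link": "Q111"}
    print(extratags.get("wikidata", extratags.get("wikidata link")))  # Q222

Unlike an `or` chain, the inner get is always evaluated, which is harmless here since it is just another dictionary lookup.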
@@ -334,12 +340,13 @@ def get_img_src(result):
             img_src = result['wikidata']['image_sign']
 
     # img_src
-    if not img_src and result.get('extratags', {}).get('image'):
-        img_src = result['extratags']['image']
-        del result['extratags']['image']
-    if not img_src and result.get('extratags', {}).get('wikimedia_commons'):
-        img_src = get_external_url('wikimedia_image', result['extratags']['wikimedia_commons'])
-        del result['extratags']['wikimedia_commons']
+    extratags = result['extratags']
+    if not img_src and extratags.get('image'):
+        img_src = extratags['image']
+        del extratags['image']
+    if not img_src and extratags.get('wikimedia_commons'):
+        img_src = get_external_url('wikimedia_image', extratags['wikimedia_commons'])
+        del extratags['wikimedia_commons']
 
     return img_src
 
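One point worth noting about the rewritten block above: the local name only aliases the nested dict, so deleting keys through it still updates the result itself. A tiny sketch with made-up data (not from the commit):

    # Made-up result: extratags is the same dict object as result['extratags'].
    result = {"extratags": {"image": "https://example.org/a.jpg"}}
    extratags = result["extratags"]

    img_src = None
    if not img_src and extratags.get("image"):
        img_src = extratags["image"]
        del extratags["image"]    # also removes the key from result['extratags']

    print(img_src)              # https://example.org/a.jpg
    print(result["extratags"])  # {}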
@@ -348,20 +355,25 @@ def get_links(result, user_language):
     """Return links from result['extratags']"""
     links = []
     link_keys = set()
+    extratags = result['extratags']
+    if not extratags:
+        # minor optimization : no need to check VALUE_TO_LINK if extratags is empty
+        return links, link_keys
     for k, mapping_function in VALUE_TO_LINK.items():
-        raw_value = result['extratags'].get(k)
-        if raw_value:
-            url, url_label = mapping_function(raw_value)
-            if url.startswith('https://wikidata.org'):
-                url_label = result.get('wikidata', {}).get('itemLabel') or url_label
-            links.append(
-                {
-                    'label': get_key_label(k, user_language),
-                    'url': url,
-                    'url_label': url_label,
-                }
-            )
-            link_keys.add(k)
+        raw_value = extratags.get(k)
+        if not raw_value:
+            continue
+        url, url_label = mapping_function(raw_value)
+        if url.startswith('https://wikidata.org'):
+            url_label = result.get('wikidata', {}).get('itemLabel') or url_label
+        links.append(
+            {
+                'label': get_key_label(k, user_language),
+                'url': url,
+                'url_label': url_label,
+            }
+        )
+        link_keys.add(k)
     return links, link_keys
 
 
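To close, a condensed sketch of the flattened control flow in the hunk above, using a stub VALUE_TO_LINK and made-up values (the real engine maps many more keys): an empty extratags returns early, and keys without a value are skipped with continue instead of an extra nesting level.

    # Stub mapping, for illustration only; the real VALUE_TO_LINK covers more keys.
    VALUE_TO_LINK = {
        "website": lambda value: (value, value),
    }

    def get_links_sketch(extratags):
        links, link_keys = [], set()
        if not extratags:
            # early return: nothing to look up
            return links, link_keys
        for k, mapping_function in VALUE_TO_LINK.items():
            raw_value = extratags.get(k)
            if not raw_value:
                continue  # guard clause instead of one more indentation level
            url, url_label = mapping_function(raw_value)
            links.append({"label": k, "url": url, "url_label": url_label})
            link_keys.add(k)
        return links, link_keys

    print(get_links_sketch({}))  # ([], set())
    print(get_links_sketch({"website": "https://example.org"}))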