mirror of
https://github.com/searxng/searxng
synced 2024-01-01 19:24:07 +01:00
[feat] public domain image archive: automatically obtain algolia api key
This commit is contained in:
parent
7e82515de0
commit
d7337b58c3
1 changed file with 56 additions and 15 deletions
|
@ -1,15 +1,12 @@
|
||||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||||
"""Public domain image archive"""
|
|
||||||
|
|
||||||
from urllib.parse import urlencode, urlparse, urlunparse, parse_qsl
|
from urllib.parse import urlencode, urlparse, urlunparse, parse_qsl
|
||||||
from json import dumps
|
from json import dumps
|
||||||
|
|
||||||
algolia_api_key = "153d2a10ce67a0be5484de130a132050"
|
from searx.network import get
|
||||||
"""Algolia API key. See engine documentation """
|
from searx.utils import extr
|
||||||
|
from searx.exceptions import SearxEngineAccessDeniedException, SearxEngineException
|
||||||
|
|
||||||
# Query string appended to an image URL to request a thumbnail bounded to
# 360x360 px (Imgix-style fit/max resizing parameters).
THUMBNAIL_SUFFIX = "?fit=max&h=360&w=360"
"""
|
"""
|
||||||
|
@ -42,23 +39,58 @@ about = {
|
||||||
"results": 'JSON',
|
"results": 'JSON',
|
||||||
}
|
}
|
||||||
|
|
||||||
# Algolia DSN endpoint that serves the PDImageArchive search index.
base_url = 'https://oqi2j6v4iz-dsn.algolia.net'
# Front-end JS config bundle of pdimagearchive.org; the Algolia API key is
# scraped from this file at runtime (see _get_algolia_api_key).
# NOTE(review): the hashed filename (config.BiNvrvzG.js) will change on site
# redeploys — verify this URL is still valid when the engine breaks.
pdia_config_url = 'https://pdimagearchive.org/_astro/config.BiNvrvzG.js'

categories = ['images']
page_size = 20
paging = True
|
|
||||||
|
|
||||||
def clean_url(url):
|
__CACHED_API_KEY = None
|
||||||
|
|
||||||
|
|
||||||
|
def _clean_url(url):
|
||||||
parsed = urlparse(url)
|
parsed = urlparse(url)
|
||||||
query = [(k, v) for (k, v) in parse_qsl(parsed.query) if k not in ['ixid', 's']]
|
query = [(k, v) for (k, v) in parse_qsl(parsed.query) if k not in ['ixid', 's']]
|
||||||
|
|
||||||
return urlunparse((parsed.scheme, parsed.netloc, parsed.path, parsed.params, urlencode(query), parsed.fragment))
|
return urlunparse((parsed.scheme, parsed.netloc, parsed.path, parsed.params, urlencode(query), parsed.fragment))
|
||||||
|
|
||||||
|
|
||||||
|
def _get_algolia_api_key():
    """Return the Algolia API key for PDImageArchive.

    On first use the key is scraped from the site's front-end config bundle
    (``pdia_config_url``) and memoized in the module-level ``__CACHED_API_KEY``;
    later calls return the cached value without a network round trip.

    Raises:
        LookupError: the config bundle could not be fetched (non-200 status)
            or no key could be extracted from its contents.
    """
    global __CACHED_API_KEY  # pylint:disable=global-statement

    if __CACHED_API_KEY:
        return __CACHED_API_KEY

    resp = get(pdia_config_url)
    if resp.status_code != 200:
        raise LookupError("Failed to obtain Algolia API key for PDImageArchive")

    # The key appears in the minified JS as ...r="<key>"... — extract the
    # first string assigned to `r`.  NOTE(review): fragile against minifier
    # renaming; confirm the marker when the bundle URL changes.
    api_key = extr(resp.text, 'r="', '"', default=None)

    if api_key is None:
        raise LookupError("Couldn't obtain Algolia API key for PDImageArchive")

    __CACHED_API_KEY = api_key
    return api_key
|
||||||
|
|
||||||
|
|
||||||
|
def _clear_cached_api_key():
|
||||||
|
global __CACHED_API_KEY # pylint:disable=global-statement
|
||||||
|
|
||||||
|
__CACHED_API_KEY = None
|
||||||
|
|
||||||
|
|
||||||
def request(query, params):
|
def request(query, params):
|
||||||
params['url'] = search_url
|
api_key = _get_algolia_api_key()
|
||||||
|
|
||||||
|
args = {
|
||||||
|
'x-algolia-api-key': api_key,
|
||||||
|
'x-algolia-application-id': 'OQI2J6V4IZ',
|
||||||
|
}
|
||||||
|
params['url'] = f"{base_url}/1/indexes/*/queries?{urlencode(args)}"
|
||||||
params["method"] = "POST"
|
params["method"] = "POST"
|
||||||
|
|
||||||
request_params = {
|
request_params = {
|
||||||
"page": params["pageno"] - 1,
|
"page": params["pageno"] - 1,
|
||||||
"query": query,
|
"query": query,
|
||||||
|
@ -71,7 +103,9 @@ def request(query, params):
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
params["data"] = dumps(data)
|
params["data"] = dumps(data)
|
||||||
logger.debug("query_url --> %s", params['url'])
|
|
||||||
|
# http errors are handled manually to be able to reset the api key
|
||||||
|
params['raise_for_httperror'] = False
|
||||||
return params
|
return params
|
||||||
|
|
||||||
|
|
||||||
|
@ -79,6 +113,13 @@ def response(resp):
|
||||||
results = []
|
results = []
|
||||||
json_data = resp.json()
|
json_data = resp.json()
|
||||||
|
|
||||||
|
if resp.status_code == 403:
|
||||||
|
_clear_cached_api_key()
|
||||||
|
raise SearxEngineAccessDeniedException()
|
||||||
|
|
||||||
|
if resp.status_code != 200:
|
||||||
|
raise SearxEngineException()
|
||||||
|
|
||||||
if 'results' not in json_data:
|
if 'results' not in json_data:
|
||||||
return []
|
return []
|
||||||
|
|
||||||
|
@ -97,9 +138,9 @@ def response(resp):
|
||||||
results.append(
|
results.append(
|
||||||
{
|
{
|
||||||
'template': 'images.html',
|
'template': 'images.html',
|
||||||
'url': clean_url(f"{about['website']}/images/{result['objectID']}"),
|
'url': _clean_url(f"{about['website']}/images/{result['objectID']}"),
|
||||||
'img_src': clean_url(base_image_url),
|
'img_src': _clean_url(base_image_url),
|
||||||
'thumbnail_src': clean_url(base_image_url + THUMBNAIL_SUFFIX),
|
'thumbnail_src': _clean_url(base_image_url + THUMBNAIL_SUFFIX),
|
||||||
'title': f"{result['title'].strip()} by {result['artist']} {result.get('displayYear', '')}",
|
'title': f"{result['title'].strip()} by {result['artist']} {result.get('displayYear', '')}",
|
||||||
'content': content,
|
'content': content,
|
||||||
}
|
}
|
||||||
|
|
Loading…
Add table
Reference in a new issue