# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""APKMirror
"""
# pylint: disable=invalid-name

from urllib.parse import urlencode

from lxml import html

from searx.utils import (
    eval_xpath_list,
    eval_xpath_getindex,
    extract_text,
)
# Engine metadata displayed by the searx "about" infrastructure.
about = {
    "website": 'https://www.apkmirror.com',
    "wikidata_id": None,
    "official_api_documentation": None,
    "use_official_api": False,
    "require_api_key": False,
    "results": 'HTML',
}
# engine dependent config
categories = ['files', 'apps']
paging = True
time_range_support = False

# search-url: page number and urlencoded query are substituted by request()
base_url = 'https://www.apkmirror.com'
search_url = base_url + '/?post_type=app_release&searchtype=apk&page={pageno}&{query}'
def request(query, params):
    """Build the APKMirror search request.

    Sets ``params['url']`` to the paginated search URL with the query
    urlencoded as the ``s`` parameter, and returns ``params``.
    """
    params['url'] = search_url.format(
        pageno=params['pageno'],
        query=urlencode({'s': query}),
    )
    # logger is injected into engine modules by the searx engine loader
    logger.debug("query_url --> %s", params['url'])
    return params
def response(resp):
    """Parse an APKMirror search result page.

    Returns a list of result dicts with ``url``, ``title`` and
    ``img_src`` keys, one per app row on the page.
    """
    results = []
    dom = html.fromstring(resp.text)

    # parse results: one appRow div per application
    for result in eval_xpath_list(dom, "//div[@id='content']//div[@class='listWidget']/div/div[@class='appRow']"):
        link = eval_xpath_getindex(result, './/h5/a', 0)

        # jump straight to the downloads section of the app page
        url = base_url + link.attrib.get('href') + '#downloads'
        title = extract_text(link)
        img_src = base_url + eval_xpath_getindex(result, './/img/@src', 0)
        res = {'url': url, 'title': title, 'img_src': img_src}

        results.append(res)
    return results