[fix] startpage engine: language/region & time support & fix CAPTCHA

One reason for the often-seen CAPTCHAs on startpage requests is the
incomplete requests SearXNG sends to startpage.com.  To avoid CAPTCHAs we need
to send a well-formed HTTP POST request with a cookie; we need to form a
request that is identical to the request built by startpage.com itself:

- in the cookie the **region** is selected
- in the POST arguments the **language** is selected

Based on the *engine_properties* boilerplate, SearXNG's startpage engine now
implements a `_fetch_engine_properties()` function to fetch regions & languages
from startpage.com.

This patch is a completely new implementation of the request() function, reverse
engineered from the startpage.com page.  The new implementation adds

- time-range support
- save-search support

to the startpage engine which has been missed in the past.

The locale code 'no_NO' from startpage does not exist and is mapped to nb-NO.
For reference see languages-subtag at iana [1], `no` is the macrolanguage::

     type: language
     Subtag: nb
     Description: Norwegian Bokmål
     Added: 2005-10-16
     Suppress-Script: Latn
     Macrolanguage: no

Additional hints:

- To fetch languages from startpage, this patch makes use of the
  EngineProperties implemented in 7bf0d46c

- To get Startpage's locale & language, the function get_engine_locale from
  9ae409a is used.

[1] https://www.iana.org/assignments/language-subtag-registry/language-subtag-registry
[2] https://www.w3.org/International/questions/qa-choosing-language-tags#langsubtag

Closes: https://github.com/searxng/searxng/issues/1081
Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
This commit is contained in:
Markus Heiser 2022-04-16 15:30:09 +02:00
parent 3df7d50df0
commit 744d96a16c
3 changed files with 345 additions and 329 deletions

View file

@ -11,9 +11,9 @@ from lxml import etree
from httpx import HTTPError from httpx import HTTPError
from searx import settings from searx import settings
from searx.data import ENGINES_LANGUAGES
from searx.network import get as http_get from searx.network import get as http_get
from searx.exceptions import SearxEngineResponseException from searx.exceptions import SearxEngineResponseException
from searx.engines import engines
# a _fetch_supported_properites() for XPath engines isn't available right now # a _fetch_supported_properites() for XPath engines isn't available right now
# _brave = ENGINES_LANGUAGES['brave'].keys() # _brave = ENGINES_LANGUAGES['brave'].keys()
@ -110,9 +110,11 @@ def seznam(query, _lang):
def startpage(query, lang): def startpage(query, lang):
# startpage autocompleter # startpage autocompleter
lui = ENGINES_LANGUAGES['startpage'].get(lang, 'english') engine = engines['startpage']
_, engine_language, _ = engine.get_engine_locale(lang)
url = 'https://startpage.com/suggestions?{query}' url = 'https://startpage.com/suggestions?{query}'
resp = get(url.format(query=urlencode({'q': query, 'segment': 'startpage.udog', 'lui': lui}))) resp = get(url.format(query=urlencode({'q': query, 'segment': 'startpage.udog', 'lui': engine_language})))
data = resp.json() data = resp.json()
return [e['text'] for e in data.get('suggestions', []) if 'text' in e] return [e['text'] for e in data.get('suggestions', []) if 'text' in e]

View file

@ -1562,255 +1562,141 @@
"type": "engine_properties" "type": "engine_properties"
}, },
"startpage": { "startpage": {
"af": { "languages": {
"alias": "afrikaans" "af": "afrikaans",
}, "am": "amharic",
"am": { "ar": "arabic",
"alias": "amharic" "az": "azerbaijani",
}, "be": "belarusian",
"ar": { "bg": "bulgarian",
"alias": "arabic" "bn": "bengali",
}, "bs": "bosnian",
"az": { "ca": "catalan",
"alias": "azerbaijani" "cs": "czech",
}, "cy": "welsh",
"be": { "da": "dansk",
"alias": "belarusian" "de": "deutsch",
}, "el": "greek",
"bg": { "en": "english_uk",
"alias": "bulgarian" "eo": "esperanto",
}, "es": "espanol",
"bn": { "et": "estonian",
"alias": "bengali" "eu": "basque",
}, "fa": "persian",
"bs": { "fi": "suomi",
"alias": "bosnian" "fo": "faroese",
}, "fr": "francais",
"ca": { "fy": "frisian",
"alias": "catalan" "ga": "irish",
}, "gd": "gaelic",
"cs": { "gl": "galician",
"alias": "czech" "gu": "gujarati",
}, "he": "hebrew",
"cy": { "hi": "hindi",
"alias": "welsh" "hr": "croatian",
}, "hu": "hungarian",
"da": { "ia": "interlingua",
"alias": "dansk" "id": "indonesian",
}, "is": "icelandic",
"de": { "it": "italiano",
"alias": "deutsch" "ja": "nihongo",
}, "jv": "javanese",
"el": { "ka": "georgian",
"alias": "greek" "kn": "kannada",
}, "ko": "hangul",
"en": { "la": "latin",
"alias": "english" "lt": "lithuanian",
}, "lv": "latvian",
"en-GB": { "mai": "bihari",
"alias": "english_uk" "mk": "macedonian",
}, "ml": "malayalam",
"eo": { "mr": "marathi",
"alias": "esperanto" "ms": "malay",
}, "mt": "maltese",
"es": { "nb": "norsk",
"alias": "espanol" "ne": "nepali",
}, "nl": "nederlands",
"et": { "oc": "occitan",
"alias": "estonian" "pa": "punjabi",
}, "pl": "polski",
"eu": { "pt": "portugues",
"alias": "basque" "ro": "romanian",
}, "ru": "russian",
"fa": { "si": "sinhalese",
"alias": "persian" "sk": "slovak",
}, "sl": "slovenian",
"fi": { "sq": "albanian",
"alias": "suomi" "sr": "serbian",
}, "su": "sudanese",
"fo": { "sv": "svenska",
"alias": "faroese" "sw": "swahili",
}, "ta": "tamil",
"fr": { "te": "telugu",
"alias": "francais" "th": "thai",
}, "ti": "tigrinya",
"fy": { "tl": "tagalog",
"alias": "frisian" "tr": "turkce",
}, "uk": "ukrainian",
"ga": { "ur": "urdu",
"alias": "irish" "uz": "uzbek",
}, "vi": "vietnamese",
"gd": { "xh": "xhosa",
"alias": "gaelic" "zh": "jiantizhongwen",
}, "zh_Hant": "fantizhengwen",
"gl": { "zu": "zulu"
"alias": "galician" },
}, "regions": {
"gu": { "ar-EG": "ar_EG",
"alias": "gujarati" "bg-BG": "bg_BG",
}, "ca-ES": "ca_ES",
"he": { "cs-CZ": "cs_CZ",
"alias": "hebrew" "da-DK": "da_DK",
}, "de-AT": "de_AT",
"hi": { "de-CH": "de_CH",
"alias": "hindi" "de-DE": "de_DE",
}, "el-GR": "el_GR",
"hr": { "en-AU": "en_AU",
"alias": "croatian" "en-CA": "en_CA",
}, "en-GB": "en-GB_GB",
"hu": { "en-IE": "en_IE",
"alias": "hungarian" "en-MY": "en_MY",
}, "en-NZ": "en_NZ",
"ia": { "en-US": "en_US",
"alias": "interlingua" "en-ZA": "en_ZA",
}, "es-AR": "es_AR",
"id": { "es-CL": "es_CL",
"alias": "indonesian" "es-ES": "es_ES",
}, "es-US": "es_US",
"is": { "es-UY": "es_UY",
"alias": "icelandic" "fi-FI": "fi_FI",
}, "fil-PH": "fil_PH",
"it": { "fr-BE": "fr_BE",
"alias": "italiano" "fr-CA": "fr_CA",
}, "fr-CH": "fr_CH",
"ja": { "fr-FR": "fr_FR",
"alias": "nihongo" "hi-IN": "hi_IN",
}, "it-CH": "it_CH",
"jv": { "it-IT": "it_IT",
"alias": "javanese" "ja-JP": "ja_JP",
}, "ko-KR": "ko_KR",
"ka": { "ms-MY": "ms_MY",
"alias": "georgian" "nb-NO": "no_NO",
}, "nl-BE": "nl_BE",
"kn": { "nl-NL": "nl_NL",
"alias": "kannada" "pl-PL": "pl_PL",
}, "pt-BR": "pt-BR_BR",
"ko": { "pt-PT": "pt_PT",
"alias": "hangul" "ro-RO": "ro_RO",
}, "ru-BY": "ru_BY",
"la": { "ru-RU": "ru_RU",
"alias": "latin" "sv-SE": "sv_SE",
}, "tr-TR": "tr_TR",
"lt": { "uk-UA": "uk_UA",
"alias": "lithuanian" "zh-CN": "zh-CN_CN",
}, "zh-HK": "zh-TW_HK",
"lv": { "zh-TW": "zh-TW_TW"
"alias": "latvian" },
}, "type": "engine_properties"
"mai": {
"alias": "bihari"
},
"mk": {
"alias": "macedonian"
},
"ml": {
"alias": "malayalam"
},
"mr": {
"alias": "marathi"
},
"ms": {
"alias": "malay"
},
"mt": {
"alias": "maltese"
},
"ne": {
"alias": "nepali"
},
"nl": {
"alias": "nederlands"
},
"no": {
"alias": "norsk"
},
"oc": {
"alias": "occitan"
},
"pa": {
"alias": "punjabi"
},
"pl": {
"alias": "polski"
},
"pt": {
"alias": "portugues"
},
"ro": {
"alias": "romanian"
},
"ru": {
"alias": "russian"
},
"si": {
"alias": "sinhalese"
},
"sk": {
"alias": "slovak"
},
"sl": {
"alias": "slovenian"
},
"sq": {
"alias": "albanian"
},
"sr": {
"alias": "serbian"
},
"su": {
"alias": "sudanese"
},
"sv": {
"alias": "svenska"
},
"sw": {
"alias": "swahili"
},
"ta": {
"alias": "tamil"
},
"te": {
"alias": "telugu"
},
"th": {
"alias": "thai"
},
"ti": {
"alias": "tigrinya"
},
"tl": {
"alias": "tagalog"
},
"tr": {
"alias": "turkce"
},
"uk": {
"alias": "ukrainian"
},
"ur": {
"alias": "urdu"
},
"uz": {
"alias": "uzbek"
},
"vi": {
"alias": "vietnamese"
},
"xh": {
"alias": "xhosa"
},
"zh": {
"alias": "jiantizhongwen"
},
"zh-HK": {
"alias": "fantizhengwen"
},
"zh-TW": {
"alias": "fantizhengwen"
},
"zu": {
"alias": "zulu"
}
}, },
"wikidata": { "wikidata": {
"ab": { "ab": {
@ -4394,4 +4280,4 @@
"zh_chs", "zh_chs",
"zh_cht" "zh_cht"
] ]
} }

View file

@ -7,17 +7,18 @@
import re import re
from time import time from time import time
from urllib.parse import urlencode
from unicodedata import normalize, combining from unicodedata import normalize, combining
from datetime import datetime, timedelta from datetime import datetime, timedelta
from collections import OrderedDict
from dateutil import parser from dateutil import parser
from lxml import html from lxml import html
from babel import Locale
from babel.localedata import locale_identifiers import babel
from searx.network import get from searx.network import get
from searx.utils import extract_text, eval_xpath, match_language from searx.locales import get_engine_locale
from searx.utils import extract_text, eval_xpath
from searx.exceptions import ( from searx.exceptions import (
SearxEngineResponseException, SearxEngineResponseException,
SearxEngineCaptchaException, SearxEngineCaptchaException,
@ -36,16 +37,22 @@ about = {
# engine dependent config # engine dependent config
categories = ['general', 'web'] categories = ['general', 'web']
# there is a mechanism to block "bot" search
# (probably the parameter qid), require
# storing of qid's between mulitble search-calls
paging = True paging = True
supported_languages_url = 'https://www.startpage.com/do/settings' number_of_results = 5
send_accept_language_header = True
safesearch = True
filter_mapping = {0: '0', 1: '1', 2: '1'}
time_range_support = True
time_range_dict = {'day': 'd', 'week': 'w', 'month': 'm', 'year': 'y'}
supported_properties_url = 'https://www.startpage.com/do/settings'
# search-url # search-url
base_url = 'https://startpage.com/' base_url = 'https://www.startpage.com/'
search_url = base_url + 'sp/search?' search_url = base_url + 'sp/search'
# specific xpath variables # specific xpath variables
# ads xpath //div[@id="results"]/div[@id="sponsored"]//div[@class="result"] # ads xpath //div[@id="results"]/div[@id="sponsored"]//div[@class="result"]
@ -104,42 +111,83 @@ def get_sc_code(headers):
return sc_code return sc_code
# do search-request
def request(query, params): def request(query, params):
# pylint: disable=line-too-long # Startpage supports a region value: 'all'
# The format string from Startpage's FFox add-on [1]:: engine_region = 'all'
# engine_language = 'english_uk'
# https://www.startpage.com/do/dsearch?query={searchTerms}&cat=web&pl=ext-ff&language=__MSG_extensionUrlLanguage__&extVersion=1.3.0 if params['language'] != 'all':
# engine_region = get_engine_locale(params['language'], engine_data.regions, default='all')
# [1] https://addons.mozilla.org/en-US/firefox/addon/startpage-private-search/ engine_language = get_engine_locale(
params['language'].split('-')[0], engine_data.languages, default='english_uk'
)
logger.debug(
'selected language %s --> engine_language: %s // engine_region: %s',
params['language'],
engine_language,
engine_region,
)
# The Accept header is also needed by the get_sc_code(..) call below.
params['headers']['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
# build arguments
args = { args = {
'query': query, 'query': query,
'page': params['pageno'],
'cat': 'web', 'cat': 'web',
# 'pl': 'ext-ff', 't': 'device',
# 'extVersion': '1.3.0', 'sc': get_sc_code(params['headers']), # hint: this func needs HTTP headers
# 'abp': "-1", 'with_date': time_range_dict.get(params['time_range'], ''),
'sc': get_sc_code(params['headers']),
} }
# set language if specified if engine_language:
if params['language'] != 'all': args['language'] = engine_language
lang_code = match_language(params['language'], supported_languages, fallback=None) args['lui'] = engine_language
if lang_code:
language_name = supported_languages[lang_code]['alias'] if params['pageno'] == 1:
args['language'] = language_name args['abp'] = ['-1', '-1']
args['lui'] = language_name
else:
args['page'] = params['pageno']
args['abp'] = '-1'
# build cookie
lang_homepage = 'english'
cookie = OrderedDict()
cookie['date_time'] = 'world'
cookie['disable_family_filter'] = filter_mapping[params['safesearch']]
cookie['disable_open_in_new_window'] = '0'
cookie['enable_post_method'] = '1' # hint: POST
cookie['enable_proxy_safety_suggest'] = '1'
cookie['enable_stay_control'] = '1'
cookie['instant_answers'] = '1'
cookie['lang_homepage'] = 's/device/%s/' % lang_homepage
cookie['num_of_results'] = '10'
cookie['suggestions'] = '1'
cookie['wt_unit'] = 'celsius'
if engine_language:
cookie['language'] = engine_language
cookie['language_ui'] = engine_language
if engine_region:
cookie['search_results_region'] = engine_region
params['cookies']['preferences'] = 'N1N'.join(["%sEEE%s" % x for x in cookie.items()])
logger.debug('cookie preferences: %s', params['cookies']['preferences'])
params['method'] = 'POST'
logger.debug("data: %s", args)
params['data'] = args
params['url'] = search_url
params['url'] = search_url + urlencode(args)
return params return params
# get response from search-request # get response from search-request
def response(resp): def response(resp):
results = [] results = []
dom = html.fromstring(resp.text) dom = html.fromstring(resp.text)
# parse results # parse results
@ -201,62 +249,142 @@ def response(resp):
return results return results
# get supported languages from their site def _fetch_engine_properties(resp, engine_properties):
def _fetch_supported_languages(resp):
# startpage's language selector is a mess each option has a displayed name
# and a value, either of which may represent the language name in the native
# script, the language name in English, an English transliteration of the
# native name, the English name of the writing script used by the language,
# or occasionally something else entirely.
# this cases are so special they need to be hardcoded, a couple of them are mispellings # startpage's language & region selectors are a mess.
language_names = { #
'english_uk': 'en-GB', # regions:
'fantizhengwen': ['zh-TW', 'zh-HK'], # in the list of regions there are tags we need to map to common
'hangul': 'ko', # region tags:
'malayam': 'ml', # - pt-BR_BR --> pt_BR
'norsk': 'nb', # - zh-CN_CN --> zh_Hans_CN
'sinhalese': 'si', # - zh-TW_TW --> zh_Hant_TW
'sudanese': 'su', # - zh-TW_HK --> zh_Hant_HK
} # - en-GB_GB --> en_GB
# and there is at least one tag with a three letter language tag (ISO 639-2)
# - fil_PH --> fil_PH
#
# regions
# -------
#
# The locale code 'no_NO' from startpage does not exists and is mapped to
# nb-NO::
#
# babel.core.UnknownLocaleError: unknown locale 'no_NO'
#
# For reference see languages-subtag at iana [1], `no` is the
# macrolanguage::
#
# type: language
# Subtag: nb
# Description: Norwegian Bokmål
# Added: 2005-10-16
# Suppress-Script: Latn
# Macrolanguage: no
#
# W3C recommends subtag over macrolanguage [2]:
#
# Use macrolanguages with care. Some language subtags have a Scope field set to
# macrolanguage, ie. this primary language subtag encompasses a number of more
# specific primary language subtags in the registry.
# ...
# As we recommended for the collection subtags mentioned above, in most cases
# you should try to use the more specific subtags ...
#
# [1] https://www.iana.org/assignments/language-subtag-registry/language-subtag-registry
# [2] https://www.w3.org/International/questions/qa-choosing-language-tags#langsubtag
#
# languages
# ---------
#
# The displayed name in startpage's settings page depend on the location
# of the IP when the 'Accept-Language' HTTP header is unset (in tha
# language update script we use "en-US,en;q=0.5" to get uniform names
# independent from the IP).
#
# Each option has a displayed name and a value, either of which
# may represent the language name in the native script, the language name
# in English, an English transliteration of the native name, the English
# name of the writing script used by the language, or occasionally
# something else entirely.
# get the English name of every language known by babel dom = html.fromstring(resp.text)
language_names.update(
{ # regions
# fmt: off
name.lower(): lang_code sp_region_names = []
# pylint: disable=protected-access for option in dom.xpath('//form[@name="settings"]//select[@name="search_results_region"]/option'):
for lang_code, name in Locale('en')._data['languages'].items() sp_region_names.append(option.get('value'))
# fmt: on
} for engine_region_tag in sp_region_names:
) if engine_region_tag == 'all':
# 'all' does not fit to a babel locale
continue
locale = None
babel_region_tag = {'no_NO': 'nb_NO'}.get(engine_region_tag, engine_region_tag) # norway
if '-' in babel_region_tag:
# pt-XY_BR --> l=pt, r=BR --> pt-BR
l, r = babel_region_tag.split('-')
r = r.split('_')[-1]
locale = babel.Locale.parse(l + '_' + r, sep='_')
else:
try:
locale = babel.Locale.parse(babel_region_tag, sep='_')
except babel.core.UnknownLocaleError:
print("ERROR: can't determine babel locale of startpage's locale %s" % engine_region_tag)
continue
if locale is None:
continue
region_tag = locale.language + '-' + locale.territory
# print("SearXNG locale tag: %s --> Engine tag: %s" % (region_tag, engine_region_tag))
engine_properties.regions[region_tag] = engine_region_tag
# languages
catalog_engine2code = {name.lower(): lang_code for lang_code, name in babel.Locale('en').languages.items()}
# get the native name of every language known by babel # get the native name of every language known by babel
for lang_code in filter(lambda lang_code: lang_code.find('_') == -1, locale_identifiers()):
native_name = Locale(lang_code).get_language_name().lower() for lang_code in filter(lambda lang_code: lang_code.find('_') == -1, babel.localedata.locale_identifiers()):
native_name = babel.Locale(lang_code).get_language_name().lower()
# add native name exactly as it is # add native name exactly as it is
language_names[native_name] = lang_code catalog_engine2code[native_name] = lang_code
# add "normalized" language name (i.e. français becomes francais and español becomes espanol) # add "normalized" language name (i.e. français becomes francais and español becomes espanol)
unaccented_name = ''.join(filter(lambda c: not combining(c), normalize('NFKD', native_name))) unaccented_name = ''.join(filter(lambda c: not combining(c), normalize('NFKD', native_name)))
if len(unaccented_name) == len(unaccented_name.encode()): if len(unaccented_name) == len(unaccented_name.encode()):
# add only if result is ascii (otherwise "normalization" didn't work) # add only if result is ascii (otherwise "normalization" didn't work)
language_names[unaccented_name] = lang_code catalog_engine2code[unaccented_name] = lang_code
# values that can't be determined by babel's languages names
catalog_engine2code.update(
{
'english_uk': 'en',
# traditional chinese used in ..
'fantizhengwen': 'zh_Hant',
# Korean alphabet
'hangul': 'ko',
# Malayalam is one of 22 scheduled languages of India.
'malayam': 'ml',
'norsk': 'nb',
'sinhalese': 'si',
}
)
dom = html.fromstring(resp.text)
sp_lang_names = []
for option in dom.xpath('//form[@name="settings"]//select[@name="language"]/option'): for option in dom.xpath('//form[@name="settings"]//select[@name="language"]/option'):
sp_lang_names.append((option.get('value'), extract_text(option).lower())) engine_lang = option.get('value')
name = extract_text(option).lower()
supported_languages = {} lang_code = catalog_engine2code.get(engine_lang)
for sp_option_value, sp_option_text in sp_lang_names: if lang_code is None:
lang_code = language_names.get(sp_option_value) or language_names.get(sp_option_text) lang_code = catalog_engine2code[name]
if isinstance(lang_code, str):
supported_languages[lang_code] = {'alias': sp_option_value}
elif isinstance(lang_code, list):
for _lc in lang_code:
supported_languages[_lc] = {'alias': sp_option_value}
else:
print('Unknown language option in Startpage: {} ({})'.format(sp_option_value, sp_option_text))
return supported_languages # print("SearXNG language tag: %s --> Engine tag: %s" % (lang_code, engine_lang))
engine_properties.languages[lang_code] = engine_lang
return engine_properties