[mod] do not consent to tracking when using google

Cherry-picked from searx/searx@5b50d7

[5b50d7] https://github.com/searx/searx/commit/5b50d7
Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
Authored by Noémi Ványi on 2022-08-02 19:19:12 +02:00, committed by Markus Heiser
parent 6f28a69f12
commit 609eeaf151
5 changed files with 10 additions and 5 deletions
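
In all five engines the change is the same: instead of sending Google a CONSENT cookie of "YES+" (which claims the user has already accepted the consent dialog and its tracking), the request() functions now send "PENDING+" followed by a random number, which looks like a client that has not answered the dialog yet. Below is a minimal illustrative sketch of the new cookie value; the consent_cookie helper is only for illustration and is not part of searx.

# Illustrative sketch only -- not code from this commit.
# "YES+" signalled an accepted consent dialog; "PENDING+<number>" mimics a
# client that has not answered it, so no consent to tracking is recorded.
from random import random

def consent_cookie():
    # random() is in [0, 1); the patch multiplies it by 100 and stringifies it,
    # e.g. "PENDING+87.2325..." (the exact number appears not to matter).
    return "PENDING+" + str(random() * 100)

params = {'cookies': {}, 'headers': {}}
params['cookies']['CONSENT'] = consent_cookie()  # same shape as in the diffs below
print(params['cookies']['CONSENT'])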


@@ -26,6 +26,7 @@ The google WEB engine itself has a special setup option:
 """
 from urllib.parse import urlencode
+from random import random
 from lxml import html
 from searx.utils import match_language, extract_text, eval_xpath, eval_xpath_list, eval_xpath_getindex
 from searx.exceptions import SearxEngineCaptchaException
@@ -282,7 +283,7 @@ def request(query, params):
         query_url += '&' + urlencode({'safe': filter_mapping[params['safesearch']]})
     params['url'] = query_url
-    params['cookies']['CONSENT'] = "YES+"
+    params['cookies']['CONSENT'] = "PENDING+" + str(random() * 100)
     params['headers'].update(lang_info['headers'])
     if use_mobile_ui:
         params['headers']['Accept'] = '*/*'


@@ -17,6 +17,7 @@ import binascii
 import re
 from urllib.parse import urlencode
 from base64 import b64decode
+from random import random
 from lxml import html
 from searx.utils import (
@@ -102,7 +103,7 @@ def request(query, params):
     ) # ceid includes a ':' character which must not be urlencoded
     params['url'] = query_url
-    params['cookies']['CONSENT'] = "YES+"
+    params['cookies']['CONSENT'] = "PENDING+" + str(random() * 100)
     params['headers'].update(lang_info['headers'])
     params['headers']['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'


@@ -13,6 +13,7 @@ Definitions`_.
 from urllib.parse import urlencode
 from datetime import datetime
+from random import random
 from lxml import html
 from searx.utils import (
@@ -91,7 +92,7 @@ def request(query, params):
     query_url += time_range_url(params)
     params['url'] = query_url
-    params['cookies']['CONSENT'] = "YES+"
+    params['cookies']['CONSENT'] = "PENDING+" + str(random() * 100)
     params['headers'].update(lang_info['headers'])
     params['headers']['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'


@@ -18,6 +18,7 @@
 import re
 from urllib.parse import urlencode
+from random import random
 from lxml import html
 from searx.utils import (
@@ -127,7 +128,7 @@ def request(query, params):
         query_url += '&' + urlencode({'safe': filter_mapping[params['safesearch']]})
     params['url'] = query_url
-    params['cookies']['CONSENT'] = "YES+"
+    params['cookies']['CONSENT'] = "PENDING+" + str(random() * 100)
     params['headers'].update(lang_info['headers'])
     params['headers']['Accept'] = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
     return params


@@ -6,6 +6,7 @@
 from functools import reduce
 from json import loads, dumps
 from urllib.parse import quote_plus
+from random import random
 # about
 about = {
@@ -36,7 +37,7 @@ base_youtube_url = 'https://www.youtube.com/watch?v='
 # do search-request
 def request(query, params):
-    params['cookies']['CONSENT'] = "YES+"
+    params['cookies']['CONSENT'] = "PENDING+" + str(random() * 100)
     if not params['engine_data'].get('next_page_token'):
         params['url'] = search_url.format(query=quote_plus(query), page=params['pageno'])
         if params['time_range'] in time_range_dict:
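
Not part of the commit, but a quick manual way to check the effect is to fetch a Google results page once with the old cookie value and once with the new one and see whether Google answers directly or redirects to consent.google.com. The sketch below assumes httpx is installed (searx itself uses it for outgoing requests) and that Google's consent handling still behaves this way; the probe helper is hypothetical.

# Hypothetical manual check, not part of the commit.
from random import random
import httpx  # assumption: httpx is available, as in searx itself

def probe(consent_value):
    resp = httpx.get(
        'https://www.google.com/search',
        params={'q': 'test'},
        cookies={'CONSENT': consent_value},
        headers={'User-Agent': 'Mozilla/5.0'},
        follow_redirects=False,
    )
    # A 200 means results were served directly; a 30x pointing at
    # consent.google.com means the cookie value was not accepted.
    return resp.status_code, resp.headers.get('location', '')

print(probe("YES+"))
print(probe("PENDING+" + str(random() * 100)))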