Merge pull request #357 from asciimoo/google_engine

[enh] google engine : parse map links and more

commit f2cbefeb54
@@ -8,6 +8,7 @@
 # @stable no (HTML can change)
 # @parse url, title, content, suggestion
 
+import re
 from urllib import urlencode
 from urlparse import urlparse, parse_qsl
 from lxml import html
@@ -78,15 +79,22 @@ country_to_hostname = {
     'TW': 'www.google.com.tw'  # Taiwan
 }
 
+# osm
+url_map = 'https://www.openstreetmap.org/'\
+    + '?lat={latitude}&lon={longitude}&zoom={zoom}&layers=M'
+
 # search-url
 search_path = '/search'
-maps_path = '/maps/'
-redirect_path = '/url'
-images_path = '/images'
 search_url = ('https://{hostname}' +
               search_path +
               '?{query}&start={offset}&gbv=1')
 
+# other URLs
+map_hostname_start = 'maps.google.'
+maps_path = '/maps'
+redirect_path = '/url'
+images_path = '/images'
+
 # specific xpath variables
 results_xpath = '//li[@class="g"]'
 url_xpath = './/h3/a/@href'
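
Note (not part of the diff): url_map is only defined in the hunks shown here, not used by them; a minimal sketch of how the template would be filled, with made-up coordinates:

    latitude, longitude, zoom = 48.8582, 2.2945, 17   # hypothetical values
    osm_link = url_map.format(latitude=latitude, longitude=longitude, zoom=zoom)
    # 'https://www.openstreetmap.org/?lat=48.8582&lon=2.2945&zoom=17&layers=M'
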
@@ -95,10 +103,29 @@ content_xpath = './/span[@class="st"]'
 content_misc_xpath = './/div[@class="f slp"]'
 suggestion_xpath = '//p[@class="_Bmc"]'
 
+# map : detail location
+map_address_xpath = './/div[@class="s"]//table//td[2]/span/text()'
+map_phone_xpath = './/div[@class="s"]//table//td[2]/span/span'
+map_website_url_xpath = 'h3[2]/a/@href'
+map_website_title_xpath = 'h3[2]'
+
+# map : near the location
+map_near = 'table[@class="ts"]//tr'
+map_near_title = './/h4'
+map_near_url = './/h4/a/@href'
+map_near_phone = './/span[@class="nobr"]'
+
+# images
 images_xpath = './/div/a'
 image_url_xpath = './@href'
 image_img_src_xpath = './img/@src'
 
+# property names
+# FIXME : no translation
+property_address = "Address"
+property_phone = "Phone number"
+
 # cookies
 pref_cookie = ''
 nid_cookie = {}
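
Note (not part of the diff): the *_xpath variables are applied per result node with lxml; a minimal sketch against hypothetical result markup:

    from lxml import html

    dom = html.fromstring('<ul><li class="g"><h3>'
                          '<a href="/url?q=http://example.com/">Example</a>'
                          '</h3></li></ul>')          # hypothetical markup
    result = dom.xpath(results_xpath)[0]
    result.xpath(url_xpath)                           # ['/url?q=http://example.com/']
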
@@ -122,6 +149,11 @@ def get_google_nid_cookie(google_hostname):
 
 # remove google-specific tracking-url
 def parse_url(url_string, google_hostname):
+    # sanity check
+    if url_string is None:
+        return url_string
+
+    # normal case
     parsed_url = urlparse(url_string)
     if (parsed_url.netloc in [google_hostname, '']
         and parsed_url.path == redirect_path):
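
Note (not part of the diff): the sanity check matters because the new map parsers feed parse_url() values coming straight from extract_text_from_dom(), which is assumed to yield None when an xpath matches nothing; previously urlparse(None) would raise. Sketch:

    parse_url(None, 'www.google.com')   # hypothetical hostname; now returns None instead of raising
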
@@ -151,7 +183,7 @@ def request(query, params):
     if len(language_array) == 2:
         country = language_array[1]
     else:
-        country = ' '
+        country = 'US'
     language = language_array[0] + ',' + language_array[0] + '-' + country
 
     if use_locale_domain:
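
Note (not part of the diff): with only a language code and no country part, the old fallback produced a malformed value such as 'en,en- '; 'US' keeps it well-formed. Sketch:

    language_array = ['en']   # a language without a region code
    country = 'US'            # the old fallback was ' '
    language = language_array[0] + ',' + language_array[0] + '-' + country
    # old result: 'en,en- '   new result: 'en,en-US'
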
@@ -196,21 +228,32 @@ def response(resp):
         try:
             url = parse_url(extract_url(result.xpath(url_xpath), google_url), google_hostname)
             parsed_url = urlparse(url, google_hostname)
-            if (parsed_url.netloc == google_hostname
-                and (parsed_url.path == search_path
-                or parsed_url.path.startswith(maps_path))):
-                # remove the link to google news and google maps
-                # FIXME : sometimes the URL is https://maps.google.*/maps
-                # no consequence, the result trigger an exception after which is ignored
-                continue
+
+            # map result
+            if ((parsed_url.netloc == google_hostname and parsed_url.path.startswith(maps_path))
+                    or (parsed_url.netloc.startswith(map_hostname_start))):
+                x = result.xpath(map_near)
+                if len(x) > 0:
+                    # map : near the location
+                    results = results + parse_map_near(parsed_url, x, google_hostname)
+                else:
+                    # map : detail about a location
+                    results = results + parse_map_detail(parsed_url, result, google_hostname)
+
+            # google news
+            elif (parsed_url.netloc == google_hostname
+                  and parsed_url.path == search_path):
+                # skipping news results
+                pass
 
             # images result
-            if (parsed_url.netloc == google_hostname
+            elif (parsed_url.netloc == google_hostname
                 and parsed_url.path == images_path):
                 # only thumbnail image provided,
                 # so skipping image results
                 # results = results + parse_images(result, google_hostname)
                 pass
 
             else:
                 # normal result
                 content = extract_text_from_dom(result, content_xpath)
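
Note (not part of the diff): the netloc test covers the case the removed FIXME complained about, results pointing at https://maps.google.*/maps instead of the search hostname. Sketch with a hypothetical URL:

    from urlparse import urlparse

    parsed_url = urlparse('https://maps.google.de/maps?ll=52.5163,13.3777&z=16')
    parsed_url.netloc.startswith(map_hostname_start)   # True -> routed to parse_map_near/_detail
    parsed_url.path.startswith(maps_path)              # True as well ('/maps')
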
@@ -222,8 +265,9 @@ def response(resp):
             # append result
             results.append({'url': url,
                             'title': title,
-                            'content': content})
-        except Exception:
+                            'content': content
+                            })
+        except:
             continue
 
     # parse suggestion
@@ -246,6 +290,77 @@ def parse_images(result, google_hostname):
                         'title': '',
                         'content': '',
                         'img_src': img_src,
-                        'template': 'images.html'})
+                        'template': 'images.html'
+                        })
 
     return results
+
+
+def parse_map_near(parsed_url, x, google_hostname):
+    results = []
+
+    for result in x:
+        title = extract_text_from_dom(result, map_near_title)
+        url = parse_url(extract_text_from_dom(result, map_near_url), google_hostname)
+        attributes = []
+        phone = extract_text_from_dom(result, map_near_phone)
+        add_attributes(attributes, property_phone, phone, 'tel:' + phone)
+        results.append({'title': title,
+                        'url': url,
+                        'content': attributes_to_html(attributes)
+                        })
+
+    return results
+
+
+def parse_map_detail(parsed_url, result, google_hostname):
+    results = []
+
+    # try to parse the geoloc
+    m = re.search('@([0-9\.]+),([0-9\.]+),([0-9]+)', parsed_url.path)
+    if m is None:
+        m = re.search('ll\=([0-9\.]+),([0-9\.]+)\&z\=([0-9]+)', parsed_url.query)
+
+    if m is not None:
+        # geoloc found (ignored)
+        lon = float(m.group(2))  # noqa
+        lat = float(m.group(1))  # noqa
+        zoom = int(m.group(3))  # noqa
+
+        # attributes
+        attributes = []
+        address = extract_text_from_dom(result, map_address_xpath)
+        phone = extract_text_from_dom(result, map_phone_xpath)
+        add_attributes(attributes, property_address, address, 'geo:' + str(lat) + ',' + str(lon))
+        add_attributes(attributes, property_phone, phone, 'tel:' + phone)
+
+        # title / content / url
+        website_title = extract_text_from_dom(result, map_website_title_xpath)
+        content = extract_text_from_dom(result, content_xpath)
+        website_url = parse_url(extract_text_from_dom(result, map_website_url_xpath), google_hostname)
+
+        # add a result if there is a website
+        if website_url is not None:
+            results.append({'title': website_title,
+                            'content': (content + '<br />' if content is not None else '')
+                            + attributes_to_html(attributes),
+                            'url': website_url
+                            })
+
+    return results
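
Note (not part of the diff): the two re.search() patterns in parse_map_detail appear to cover the two URL shapes carrying coordinates, '@lat,lon,zoom' in the path and 'll=lat,lon&z=zoom' in the query string. Hypothetical inputs:

    import re

    re.search('@([0-9\.]+),([0-9\.]+),([0-9]+)',
              '/maps/place/Eiffel+Tower/@48.8582,2.2945,17z').groups()
    # ('48.8582', '2.2945', '17')

    re.search('ll\=([0-9\.]+),([0-9\.]+)\&z\=([0-9]+)',
              'll=48.8582,2.2945&z=17').groups()
    # ('48.8582', '2.2945', '17')
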
+
+
+def add_attributes(attributes, name, value, url):
+    if value is not None and len(value) > 0:
+        attributes.append({'label': name, 'value': value, 'url': url})
+
+
+def attributes_to_html(attributes):
+    retval = '<table class="table table-striped">'
+    for a in attributes:
+        value = a.get('value')
+        if 'url' in a:
+            value = '<a href="' + a.get('url') + '">' + value + '</a>'
+        retval = retval + '<tr><th>' + a.get('label') + '</th><td>' + value + '</td></tr>'
+    retval = retval + '</table>'
+    return retval
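
Note (not part of the diff): a short sketch of what the two new helpers produce for a single phone attribute (made-up number):

    attributes = []
    add_attributes(attributes, property_phone, '+33 1 23 45 67 89', 'tel:+33123456789')
    attributes_to_html(attributes)
    # '<table class="table table-striped"><tr><th>Phone number</th>'
    # '<td><a href="tel:+33123456789">+33 1 23 45 67 89</a></td></tr></table>'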