diff --git a/searx/engines/google_news.py b/searx/engines/google_news.py
index 95d15cfb9..ede615614 100644
--- a/searx/engines/google_news.py
+++ b/searx/engines/google_news.py
@@ -1,41 +1,56 @@
 """
  Google (News)

- @website     https://www.google.com
- @provide-api yes (https://developers.google.com/web-search/docs/),
-              deprecated!
+ @website     https://news.google.com
+ @provide-api no

- @using-api   yes
- @results     JSON
- @stable      yes (but deprecated)
+ @using-api   no
+ @results     HTML
+ @stable      no
  @parse       url, title, content, publishedDate
 """

+from lxml import html
 from urllib import urlencode
-from json import loads
-from dateutil import parser

 # search-url
 categories = ['news']
 paging = True
 language_support = True
+safesearch = True
+time_range_support = True
+number_of_results = 10

-# engine dependent config
-url = 'https://ajax.googleapis.com/'
-search_url = url + 'ajax/services/search/news?v=2.0&start={offset}&rsz=large&safe=off&filter=off&{query}&hl={lang}'
+search_url = 'https://www.google.com/search'\
+    '?{query}'\
+    '&tbm=nws'\
+    '&gws_rd=cr'\
+    '&{search_options}'
+time_range_attr = "qdr:{range}"
+time_range_dict = {'day': 'd',
+                   'week': 'w',
+                   'month': 'm'}


 # do search-request
 def request(query, params):
-    offset = (params['pageno'] - 1) * 8
-    language = 'en-US'
+    search_options = {
+        'start': (params['pageno'] - 1) * number_of_results
+    }
+
+    if params['time_range'] in time_range_dict:
+        search_options['tbs'] = time_range_attr.format(range=time_range_dict[params['time_range']])
+
+    if safesearch and params['safesearch']:
+        search_options['safe'] = 'on'
+
+    params['url'] = search_url.format(query=urlencode({'q': query}),
+                                      search_options=urlencode(search_options))
+
     if params['language'] != 'all':
-        language = params['language'].replace('_', '-')
-
-    params['url'] = search_url.format(offset=offset,
-                                      query=urlencode({'q': query}),
-                                      lang=language)
+        language_array = params['language'].lower().split('_')
+        params['url'] += '&lr=lang_' + language_array[0]

     return params

@@ -44,24 +59,21 @@ def request(query, params):
 def response(resp):
     results = []

-    search_res = loads(resp.text)
-
-    # return empty array if there are no results
-    if not search_res.get('responseData', {}).get('results'):
-        return []
+    dom = html.fromstring(resp.text)

     # parse results
-    for result in search_res['responseData']['results']:
-        # parse publishedDate
-        publishedDate = parser.parse(result['publishedDate'])
-        if 'url' not in result:
-            continue
+    for result in dom.xpath('//div[@class="g"]|//div[@class="g _cy"]'):
+        r = {
+            'url': result.xpath('.//div[@class="_cnc"]//a/@href')[0],
+            'title': ''.join(result.xpath('.//div[@class="_cnc"]//h3//text()')),
+            'content': ''.join(result.xpath('.//div[@class="st"]//text()')),
+        }

-        # append result
-        results.append({'url': result['unescapedUrl'],
-                        'title': result['titleNoFormatting'],
-                        'publishedDate': publishedDate,
-                        'content': result['content']})
+        img = result.xpath('.//img/@src')[0]
+        if img and not img.startswith('data'):
+            r['img_src'] = img
+
+        results.append(r)

     # return results
     return results
diff --git a/tests/unit/engines/test_google_news.py b/tests/unit/engines/test_google_news.py
index 31d674121..6454dde47 100644
--- a/tests/unit/engines/test_google_news.py
+++ b/tests/unit/engines/test_google_news.py
@@ -1,3 +1,5 @@
+# -*- coding: utf-8 -*-
+
 from collections import defaultdict
 import mock
 from searx.engines import google_news
@@ -11,16 +13,16 @@ class TestGoogleNewsEngine(SearxTestCase):
         dicto = defaultdict(dict)
         dicto['pageno'] = 1
         dicto['language'] = 'fr_FR'
+        dicto['time_range'] = 'w'
         params = google_news.request(query, dicto)
         self.assertIn('url', params)
         self.assertIn(query, params['url'])
-        self.assertIn('googleapis.com', params['url'])
         self.assertIn('fr', params['url'])

         dicto['language'] = 'all'
         params = google_news.request(query, dicto)
         self.assertIn('url', params)
-        self.assertIn('en', params['url'])
+        self.assertNotIn('fr', params['url'])

     def test_response(self):
         self.assertRaises(AttributeError, google_news.response, None)
@@ -34,103 +36,15 @@ class TestGoogleNewsEngine(SearxTestCase):
         response = mock.Mock(text='{"data": []}')
         self.assertEqual(google_news.response(response), [])

-        json = """
-        {
-        "responseData": {
-            "results": [
-                {
-                    "GsearchResultClass": "GnewsSearch",
-                    "clusterUrl": "http://news.google.com/news/story?ncl=d2d3t1LMDpNIj2MPPhdTT0ycN4sWM&hl=fr&ned=fr",
-                    "content": "This is the content",
-                    "unescapedUrl": "http://this.is.the.url",
-                    "url": "http://this.is.the.url",
-                    "title": "This is the title",
-                    "titleNoFormatting": "This is the title",
-                    "location": "",
-                    "publisher": "Jeux Actu",
-                    "publishedDate": "Fri, 30 Jan 2015 11:00:25 -0800",
-                    "signedRedirectUrl": "http://news.google.com/",
-                    "language": "fr",
-                    "image": {
-                        "url": "http://i.jeuxactus.com/datas/jeux/d/y/dying-light/vu/dying-light-54cc080b568fb.jpg",
-                        "tbUrl": "http://t1.gstatic.com/images?q=tbn:ANd9GcSF4yYrs9Ycw23DGiOSAZ-5SEPXYwG3LNs",
-                        "originalContextUrl": "http://www.jeuxactu.com/test-dying-light-sur-ps4-97208.htm",
-                        "publisher": "Jeux Actu",
-                        "tbWidth": 80,
-                        "tbHeight": 30
-                    },
-                    "relatedStories": [
-                        {
-                            "unescapedUrl": "http://www.jeuxvideo.com/test/415823/dying-light.htm",
-                            "url": "http%3A%2F%2Fwww.jeuxvideo.com%2Ftest%2F415823%2Fdying-light.htm",
-                            "title": "Test du jeu Dying Light - jeuxvideo.com",
-                            "titleNoFormatting": "Test du jeu Dying Light - jeuxvideo.com",
-                            "location": "",
-                            "publisher": "JeuxVideo.com",
-                            "publishedDate": "Fri, 30 Jan 2015 08:52:30 -0800",
-                            "signedRedirectUrl": "http://news.google.com/news/url?sa=T&",
-                            "language": "fr"
-                        }
-                    ]
-                }
-            ]
-        },
-        "responseDetails": null,
-        "responseStatus": 200
-        }
-        """
-        response = mock.Mock(text=json)
+        html = u"""
+