diff --git a/searx/engines/bing_news.py b/searx/engines/bing_news.py
index 789a23b89..182bd36b5 100644
--- a/searx/engines/bing_news.py
+++ b/searx/engines/bing_news.py
@@ -15,6 +15,7 @@ from lxml import html
 from datetime import datetime, timedelta
 from dateutil import parser
 import re
+from searx.engines.xpath import extract_text
 
 # engine dependent config
 categories = ['news']
@@ -42,6 +43,7 @@ def request(query, params):
     params['cookies']['_FP'] = "ui=en-US"
 
     params['url'] = base_url + search_path
+
     return params
 
 
@@ -55,44 +57,37 @@ def response(resp):
     for result in dom.xpath('//div[@class="sn_r"]'):
         link = result.xpath('.//div[@class="newstitle"]/a')[0]
         url = link.attrib.get('href')
-        title = ' '.join(link.xpath('.//text()'))
-        contentXPath = result.xpath('.//div[@class="sn_txt"]/div'
-                                    '//span[@class="sn_snip"]//text()')
+        title = extract_text(link)
+        contentXPath = result.xpath('.//div[@class="sn_txt"]/div//span[@class="sn_snip"]')
         if contentXPath is not None:
-            content = escape(' '.join(contentXPath))
+            content = escape(extract_text(contentXPath))
 
         # parse publishedDate
         publishedDateXPath = result.xpath('.//div[@class="sn_txt"]/div'
                                           '//span[contains(@class,"sn_ST")]'
-                                          '//span[contains(@class,"sn_tm")]'
-                                          '//text()')
+                                          '//span[contains(@class,"sn_tm")]')
+
         if publishedDateXPath is not None:
-            publishedDate = escape(' '.join(publishedDateXPath))
+            publishedDate = escape(extract_text(publishedDateXPath))
 
         if re.match("^[0-9]+ minute(s|) ago$", publishedDate):
             timeNumbers = re.findall(r'\d+', publishedDate)
-            publishedDate = datetime.now()\
-                - timedelta(minutes=int(timeNumbers[0]))
+            publishedDate = datetime.now() - timedelta(minutes=int(timeNumbers[0]))
         elif re.match("^[0-9]+ hour(s|) ago$", publishedDate):
             timeNumbers = re.findall(r'\d+', publishedDate)
-            publishedDate = datetime.now()\
-                - timedelta(hours=int(timeNumbers[0]))
-        elif re.match("^[0-9]+ hour(s|),"
-                      " [0-9]+ minute(s|) ago$", publishedDate):
+            publishedDate = datetime.now() - timedelta(hours=int(timeNumbers[0]))
+        elif re.match("^[0-9]+ hour(s|), [0-9]+ minute(s|) ago$", publishedDate):
             timeNumbers = re.findall(r'\d+', publishedDate)
             publishedDate = datetime.now()\
                 - timedelta(hours=int(timeNumbers[0]))\
                 - timedelta(minutes=int(timeNumbers[1]))
         elif re.match("^[0-9]+ day(s|) ago$", publishedDate):
             timeNumbers = re.findall(r'\d+', publishedDate)
-            publishedDate = datetime.now()\
-                - timedelta(days=int(timeNumbers[0]))
+            publishedDate = datetime.now() - timedelta(days=int(timeNumbers[0]))
         else:
             try:
-                # FIXME use params['language'] to parse either mm/dd or dd/mm
                 publishedDate = parser.parse(publishedDate, dayfirst=False)
             except TypeError:
-                # FIXME
                 publishedDate = datetime.now()
 
         # append result
diff --git a/searx/tests/engines/test_bing_news.py b/searx/tests/engines/test_bing_news.py
new file mode 100644
index 000000000..f22b80e87
--- /dev/null
+++ b/searx/tests/engines/test_bing_news.py
@@ -0,0 +1,236 @@
+from collections import defaultdict
+import mock
+from searx.engines import bing_news
+from searx.testing import SearxTestCase
+
+
+class TestBingNewsEngine(SearxTestCase):
+
+    def test_request(self):
+        query = 'test_query'
+        dicto = defaultdict(dict)
+        dicto['pageno'] = 1
+        dicto['language'] = 'fr_FR'
+        params = bing_news.request(query, dicto)
+        self.assertIn('url', params)
+        self.assertIn(query, params['url'])
+        self.assertIn('bing.com', params['url'])
+        self.assertIn('fr', params['url'])
+        self.assertIn('_FP', params['cookies'])
+        self.assertIn('en', params['cookies']['_FP'])
+
+        dicto['language'] = 'all'
+        params = bing_news.request(query, dicto)
+        self.assertIn('en', params['url'])
+        self.assertIn('_FP', params['cookies'])
+        self.assertIn('en', params['cookies']['_FP'])
+
+    def test_response(self):
+        self.assertRaises(AttributeError, bing_news.response, None)
+        self.assertRaises(AttributeError, bing_news.response, [])
+        self.assertRaises(AttributeError, bing_news.response, '')
+        self.assertRaises(AttributeError, bing_news.response, '[]')
+
+        response = mock.Mock(content='')
+        self.assertEqual(bing_news.response(response), [])
+
+        response = mock.Mock(content='')
+        self.assertEqual(bing_news.response(response), [])
+
+        html = """
+        """
+        response = mock.Mock(content=html)
+        results = bing_news.response(response)
+        self.assertEqual(type(results), list)
+        self.assertEqual(len(results), 1)
+        self.assertEqual(results[0]['title'], 'Title')
+        self.assertEqual(results[0]['url'], 'http://url.of.article/')
+        self.assertEqual(results[0]['content'], 'Article Content')
+
+        html = """