forked from zaclys/searxng
Add Reddit search engine
commit d026a97e42 (parent e5677ae6b6)
searx/engines/reddit.py (new file, 74 lines)
@@ -0,0 +1,74 @@
"""
 Reddit

 @website      https://www.reddit.com/
 @provide-api  yes (https://www.reddit.com/dev/api)

 @using-api    yes
 @results      JSON
 @stable       yes
 @parse        url, title, content, thumbnail, publishedDate
"""

import json
from cgi import escape
from urllib import urlencode
from urlparse import urlparse
from datetime import datetime

# engine dependent config
categories = ['general', 'images', 'news', 'social media']
page_size = 25

# search-url
search_url = 'https://www.reddit.com/search.json?{query}'


# do search-request
def request(query, params):
    query = urlencode({'q': query,
                       'limit': page_size})
    params['url'] = search_url.format(query=query)

    return params


# get response from search-request
def response(resp):
    img_results = []
    text_results = []

    search_results = json.loads(resp.text)

    # return empty array if there are no results
    if 'data' not in search_results:
        return []

    posts = search_results.get('data', {}).get('children', [])

    # process results
    for post in posts:
        data = post['data']

        # extract post information
        params = {
            'url': data['url'],
            'title': data['title']
        }

        # if thumbnail field contains a valid URL, we need to change template
        thumbnail = data['thumbnail']
        url_info = urlparse(thumbnail)
        # netloc & path
        if url_info[1] != '' and url_info[2] != '':
            params['thumbnail_src'] = thumbnail
            params['template'] = 'images.html'
            img_results.append(params)
        else:
            created = datetime.fromtimestamp(data['created_utc'])
            params['content'] = escape(data['selftext'])
            params['publishedDate'] = created
            text_results.append(params)

    # show images first and text results second
    return img_results + text_results
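The urlparse() check above is what routes each post: Reddit fills the thumbnail field with placeholder strings such as "self" for text posts, and those parse with an empty network location. A minimal sketch of that decision, written against Python 3's urllib.parse instead of the Python 2 urlparse module imported above (the helper name is made up for illustration):

from urllib.parse import urlparse

# illustration of the netloc & path check used in response() above
def is_image_post(thumbnail):
    # a real thumbnail URL has both a network location and a path;
    # placeholders like 'self' parse with an empty netloc
    url_info = urlparse(thumbnail)
    return url_info.netloc != '' and url_info.path != ''

print(is_image_post('http://image.com/picture.jpg'))  # True  -> images.html result
print(is_image_post('self'))                          # False -> plain text result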
@@ -213,6 +213,13 @@ engines:
    shortcut : qws
    categories : social media

  - name : reddit
    engine : reddit
    shortcut : re
    page_size : 25
    timeout : 10.0
    disabled : True

  - name : kickass
    engine : kickass
    shortcut : ka
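The settings entry registers the engine under the shortcut re, ships it disabled by default, and lets page_size and timeout shadow the module-level defaults. A rough sketch of that override pattern follows; the loader below is hypothetical and written only for this note, not searx's actual engine initialization:

# hypothetical loader: applying a settings entry as module attributes
from searx.engines import reddit

engine_settings = {'page_size': 25, 'timeout': 10.0, 'disabled': True}

for key, value in engine_settings.items():
    # e.g. page_size = 25 replaces the value hard-coded in reddit.py
    setattr(reddit, key, value)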
tests/unit/engines/test_reddit.py (new file, 67 lines)
@@ -0,0 +1,67 @@
from collections import defaultdict
import mock
from searx.engines import reddit
from searx.testing import SearxTestCase
from datetime import datetime


class TestRedditEngine(SearxTestCase):

    def test_request(self):
        query = 'test_query'
        dic = defaultdict(dict)
        params = reddit.request(query, dic)
        self.assertTrue('url' in params)
        self.assertTrue(query in params['url'])
        self.assertTrue('reddit.com' in params['url'])

    def test_response(self):
        resp = mock.Mock(text='{}')
        self.assertEqual(reddit.response(resp), [])

        json = """
        {
            "kind": "Listing",
            "data": {
                "children": [{
                    "data": {
                        "url": "http://google.com/",
                        "title": "Title number one",
                        "selftext": "Sample",
                        "created_utc": 1401219957.0,
                        "thumbnail": "http://image.com/picture.jpg"
                    }
                }, {
                    "data": {
                        "url": "https://reddit.com/",
                        "title": "Title number two",
                        "selftext": "Dominus vobiscum",
                        "created_utc": 1438792533.0,
                        "thumbnail": "self"
                    }
                }]
            }
        }
        """

        resp = mock.Mock(text=json)
        results = reddit.response(resp)

        self.assertEqual(len(results), 2)
        self.assertEqual(type(results), list)

        # testing first result (picture)
        r = results[0]
        self.assertEqual(r['url'], 'http://google.com/')
        self.assertEqual(r['title'], 'Title number one')
        self.assertEqual(r['template'], 'images.html')
        self.assertEqual(r['thumbnail_src'], 'http://image.com/picture.jpg')

        # testing second result (self-post)
        r = results[1]
        self.assertEqual(r['url'], 'https://reddit.com/')
        self.assertEqual(r['title'], 'Title number two')
        self.assertEqual(r['content'], 'Dominus vobiscum')
        created = datetime.fromtimestamp(1438792533.0)
        self.assertEqual(r['publishedDate'], created)
        self.assertTrue('thumbnail_src' not in r)
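The fixture mirrors the two branches in response(): one child with a real thumbnail URL (rendered with the images.html template) and one self-post (rendered as a plain text result with content and publishedDate). Assuming the repository root is on the import path, the module can also be run on its own with the standard library runner; a minimal sketch:

# sketch: run only this test module with the stdlib unittest runner
import unittest

suite = unittest.defaultTestLoader.loadTestsFromName('tests.unit.engines.test_reddit')
unittest.TextTestRunner(verbosity=2).run(suite)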