2014-03-04 12:11:04 +00:00
|
|
|
#!/usr/bin/env python
|
|
|
|
|
|
|
|
from urllib import urlencode
|
|
|
|
from json import loads
|
2014-03-18 12:19:50 +00:00
|
|
|
from dateutil import parser
|
2014-03-15 19:20:41 +00:00
|
|
|
from datetime import datetime
|
2014-03-04 12:11:04 +00:00
|
|
|
|
|
|
|
# searx engine categories this engine serves (news-only engine)
categories = ['news']
|
|
|
|
|
|
|
|
# base URL of the Google AJAX Search API host
url = 'https://ajax.googleapis.com/'
|
2014-03-04 13:20:29 +00:00
|
|
|
# news search URL template; placeholders are filled in by request():
#   {offset}   zero-based result offset (8 results per page, 'rsz=large')
#   {query}    urlencoded 'q=...' search term
#   {language} value for the 'hl' parameter, e.g. 'en-US'
search_url = url + 'ajax/services/search/news?v=2.0&start={offset}&rsz=large&safe=off&filter=off&{query}&hl={language}' # noqa
|
2014-03-04 12:11:04 +00:00
|
|
|
|
|
|
|
# this engine supports paging through results
paging = True
|
|
|
|
# this engine honours the user's configured search language
language_support = True
|
|
|
|
|
|
|
|
|
|
|
|
def request(query, params):
    """Build the Google News search request.

    Fills params['url'] with the fully-formatted search URL and returns
    the (mutated) params dict.

    :param query: the raw search string
    :param params: searx request dict; 'pageno' (1-based page number)
                   and 'language' ('all' or e.g. 'en_US') are read here
    """
    # 8 results per page ('rsz=large'), so each page advances the
    # zero-based offset by 8
    offset = (params['pageno'] - 1) * 8

    # 'all' means no language preference; default to US English then,
    # otherwise convert searx's 'xx_YY' form to Google's 'xx-YY'
    if params['language'] == 'all':
        language = 'en-US'
    else:
        language = params['language'].replace('_', '-')

    params['url'] = search_url.format(offset=offset,
                                      query=urlencode({'q': query}),
                                      language=language)

    return params
|
|
|
|
|
|
|
|
|
|
|
|
def response(resp):
    """Parse a Google News API response into searx result dicts.

    :param resp: HTTP response object whose ``.text`` holds the JSON
                 payload returned by the 'ajax/services/search/news'
                 endpoint
    :returns: list of dicts with 'url', 'title', 'publishedDate' and
              'content' keys; empty list when the payload has no results
    """
    results = []

    search_res = loads(resp.text)

    # 'responseData' may be absent OR present with a null value on error
    # responses; `or {}` covers both, while a plain .get(..., {}) default
    # would still hand None to the chained .get below.
    response_data = search_res.get('responseData') or {}

    # return empty array if there are no results
    if not response_data.get('results'):
        return []

    # parse results
    for result in response_data['results']:
        # dates look like: Mon, 10 Mar 2014 16:26:15 -0700
        try:
            publishedDate = parser.parse(result['publishedDate'])
        except (ValueError, TypeError):
            # one malformed/missing date should not abort the whole
            # result page; fall back to the current time
            publishedDate = datetime.now()

        # append result
        results.append({'url': result['unescapedUrl'],
                        'title': result['titleNoFormatting'],
                        'publishedDate': publishedDate,
                        'content': result['content']})

    # return results
    return results
|