forked from zaclys/searxng
Hopefully fix code style errors
parent 3ec35a06f7
commit fb364ffae7

@@ -28,14 +28,11 @@ xpath_title = './/td[3]/a[last()]'
xpath_torrent_links = './/td[3]/a'
xpath_filesize = './/td[4]/text()'


# do search-request
def request(query, params):
    query = urlencode({'keyword': query})
    params['url'] = search_url.format(query=query, offset=params['pageno'])
    return params


# get response from search-request
def response(resp):
    results = []
    dom = html.fromstring(resp.text)
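
For context, a minimal sketch of how the request hook above could be exercised on its own. The module path and the example query are assumptions based on the tracker domain in this diff, not something the commit itself shows:

# rough usage sketch; module path assumed
from searx.engines import acgsou

params = acgsou.request('initial d', {'pageno': 1})
print(params['url'])  # search_url with the url-encoded keyword and page offset filled in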

@@ -46,23 +43,17 @@ def response(resp):
        magnet_link = "magnet:?xt=urn:btih:{}&tr=http://tracker.acgsou.com:2710/announce"
        torrent_link = ""

        # category in which our torrent belongs
        try:
            category = extract_text(result.xpath(xpath_category)[0])
        except:
            pass

        # torrent title
        page_a = result.xpath(xpath_title)[0]
        title = extract_text(page_a)

        # link to the page
        href = base_url + page_a.attrib.get('href')

        # magnet link
        magnet_link = magnet_link.format(page_a.attrib.get('href')[5:-5])

        # let's try to calculate the torrent size
        try:
            filesize_info = result.xpath(xpath_filesize)[0]
            filesize = filesize_info[:-2]
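
The [5:-5] slice above presumably strips a leading "show-" and a trailing ".html" from the result link to leave the bare info hash; the link format used in this small illustration is an assumption:

# illustration only: the "show-<infohash>.html" link format is assumed
href = 'show-0123456789abcdef0123456789abcdef01234567.html'
magnet_template = "magnet:?xt=urn:btih:{}&tr=http://tracker.acgsou.com:2710/announce"
print(magnet_template.format(href[5:-5]))
# -> magnet:?xt=urn:btih:0123456789abcdef0123456789abcdef01234567&tr=http://tracker.acgsou.com:2710/announce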

@@ -70,16 +61,14 @@ def response(resp):
            filesize = get_torrent_size(filesize, filesize_multiplier)
        except:
            pass

        # content string contains all information not included in the template
        # I didn't add download/seed/leech counts since, as I figured out, they are generated randomly every time
        content = 'Category: "{category}".'
        content = content.format(category=category)

        results.append({'url': href,
                        'title': title,
                        'content': content,
                        'filesize': filesize,
                        'magnetlink': magnet_link,
                        'template': 'torrent.html'})

    return results
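
A rough way to exercise response() against a saved results page; only resp.text is read by the parser above. The stub response type, the file name, and the module path are assumptions:

# sketch with assumed names: an object with a .text attribute is all the parser needs
from collections import namedtuple

from searx.engines import acgsou  # assumed module path

FakeResponse = namedtuple('FakeResponse', ['text'])

with open('acgsou_results.html', encoding='utf-8') as fh:  # hypothetical saved search page
    resp = FakeResponse(text=fh.read())

for item in acgsou.response(resp):
    print(item['title'], item['filesize'], item['magnetlink'])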