searxngRebrandZaclys/searx/engines/github.py


# SPDX-License-Identifier: AGPL-3.0-or-later
"""
Github (IT)
"""
from json import loads
from urllib.parse import urlencode
# about
about = {
"website": 'https://github.com/',
"wikidata_id": 'Q364',
"official_api_documentation": 'https://developer.github.com/v3/',
"use_official_api": True,
"require_api_key": False,
"results": 'JSON',
}
# engine dependent config
categories = ['it', 'repos']
# search-url
search_url = 'https://api.github.com/search/repositories?sort=stars&order=desc&{query}' # noqa
accept_header = 'application/vnd.github.preview.text-match+json'
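# note: this preview media type asks GitHub's search API to include text-match
# metadata in the JSON response (see the official API documentation linked above)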
# do search-request
def request(query, params):
    params['url'] = search_url.format(query=urlencode({'q': query}))

    params['headers']['Accept'] = accept_header

    return params
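
# Illustration (not in the original file): for a query of "searxng",
# urlencode({'q': query}) yields "q=searxng", so params['url'] becomes
#   https://api.github.com/search/repositories?sort=stars&order=desc&q=searxng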
# get response from search-request
def response(resp):
    results = []

    search_res = loads(resp.text)

    # check if items are received
    if 'items' not in search_res:
        return []

    # parse results
    for res in search_res['items']:
        title = res['name']
        url = res['html_url']

        if res['description']:
            content = res['description'][:500]
        else:
            content = ''

        # append result
        results.append({'url': url, 'title': title, 'content': content})

    # return results
    return results
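

# --- usage sketch (illustration only, not part of the engine) ---------------
# A minimal, hedged example of how the two hooks above fit together: build the
# request parameters, then parse a fabricated GitHub-style payload. The
# FakeResponse class and the sample item are assumptions for illustration.
if __name__ == '__main__':
    from json import dumps

    class FakeResponse:
        # stands in for the HTTP response object normally passed to response()
        text = dumps({'items': [{'name': 'example-repo',
                                 'html_url': 'https://github.com/example/example-repo',
                                 'description': 'an illustrative repository entry'}]})

    built = request('metasearch', {'headers': {}})
    print(built['url'])  # ...search/repositories?sort=stars&order=desc&q=metasearch
    print(response(FakeResponse()))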