forked from zaclys/searxng
Merge pull request #15 from return42/add-springer
Add a search engine for Springer Nature
This commit is contained in:
commit d01741c9a2

Makefile
@@ -194,6 +194,7 @@ PYLINT_FILES=\
 	searx/engines/meilisearch.py \
 	searx/engines/solidtorrents.py \
 	searx/engines/solr.py \
+	searx/engines/springer.py \
 	searx/engines/google_scholar.py \
 	searx/engines/yahoo_news.py \
 	searx/engines/apkmirror.py \

searx/engines/springer.py
@@ -0,0 +1,74 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+"""Springer Nature (science)
+
+"""
+
+# pylint: disable=missing-function-docstring
+
+from datetime import datetime
+from json import loads
+from urllib.parse import urlencode
+
+from searx import logger
+from searx.exceptions import SearxEngineAPIException
+
+logger = logger.getChild('Springer Nature engine')
+
+about = {
+    "website": 'https://www.springernature.com/',
+    "wikidata_id": 'Q21096327',
+    "official_api_documentation": 'https://dev.springernature.com/',
+    "use_official_api": True,
+    "require_api_key": True,
+    "results": 'JSON',
+}
+
+categories = ['science']
+paging = True
+nb_per_page = 10
+api_key = 'unset'
+
+base_url = 'https://api.springernature.com/metadata/json?'
+
+def request(query, params):
+    if api_key == 'unset':
+        raise SearxEngineAPIException('missing Springer-Nature API key')
+    args = urlencode({
+        'q' : query,
+        's' : nb_per_page * (params['pageno'] - 1),
+        'p' : nb_per_page,
+        'api_key' : api_key
+    })
+    params['url'] = base_url + args
+    logger.debug("query_url --> %s", params['url'])
+    return params
+
+
+def response(resp):
+    results = []
+    json_data = loads(resp.text)
+
+    for record in json_data['records']:
+        content = record['abstract'][0:500]
+        if len(record['abstract']) > len(content):
+            content += "..."
+        published = datetime.strptime(record['publicationDate'], '%Y-%m-%d')
+
+        metadata = [record[x] for x in [
+            'publicationName',
+            'identifier',
+            'contentType',
+        ] if record.get(x) is not None]
+
+        metadata = ' / '.join(metadata)
+        if record.get('startingPage') and record.get('endingPage') is not None:
+            metadata += " (%(startingPage)s-%(endingPage)s)" % record
+
+        results.append({
+            'title': record['title'],
+            'url': record['url'][0]['value'].replace('http://', 'https://', 1),
+            'content' : content,
+            'publishedDate' : published,
+            'metadata' : metadata
+        })
+    return results
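
For orientation (not part of the commit): request() above only assembles a GET URL for the Springer metadata API. A minimal standalone sketch of that construction, with a placeholder query and a placeholder API key, would look like this:

    from urllib.parse import urlencode

    # Illustration only: mirrors the URL built by request() above.
    # 'graphene' and 'example-key' are placeholders, not real values.
    nb_per_page = 10
    pageno = 2                            # second result page
    args = urlencode({
        'q': 'graphene',                  # search terms
        's': nb_per_page * (pageno - 1),  # record offset: 10 for page 2
        'p': nb_per_page,                 # records per page
        'api_key': 'example-key',
    })
    print('https://api.springernature.com/metadata/json?' + args)
    # -> https://api.springernature.com/metadata/json?q=graphene&s=10&p=10&api_key=example-key

The real key is supplied through the api_key option in settings.yml (see the hunk below); while it is unset, request() raises SearxEngineAPIException instead of sending a request.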

searx/settings.yml
@@ -966,6 +966,16 @@ engines:
     # query_fields : '' # query fields
     # enable_http : True
 
+  - name : springer nature
+    engine : springer
+    # get your API key from: https://dev.springernature.com/signup
+    # api_key : "a69685087d07eca9f13db62f65b8f601" # working API key, for test & debug
+    # set api_key and comment out disabled ..
+    disabled: True
+    shortcut : springer
+    categories : science
+    timeout : 6.0
+
   - name : startpage
     engine : startpage
     shortcut : sp

@@ -22,6 +22,7 @@
 {%- if result.publishedDate %}<time class="text-muted" datetime="{{ result.pubdate }}" >{{ result.publishedDate }}</time>{% endif -%}
 {%- if result.magnetlink %}<small> • {{ result_link(result.magnetlink, icon('magnet') + _('magnet link'), "magnetlink", id) }}</small>{% endif -%}
 {%- if result.torrentfile %}<small> • {{ result_link(result.torrentfile, icon('download-alt') + _('torrent file'), "torrentfile", id) }}</small>{% endif -%}
+{%- if result.metadata %} <div class="highlight">{{ result.metadata|safe }}</div>{% endif -%}
 {%- endmacro %}
 
 <!-- Draw result footer -->

@@ -35,6 +35,7 @@
 <!-- Draw result sub header -->
 {%- macro result_sub_header(result) -%}
 {% if result.publishedDate %}<time class="published_date" datetime="{{ result.pubdate }}" >{{ result.publishedDate }}</time>{% endif %}
+{%- if result.metadata %} <div class="highlight">{{ result.metadata|safe }}</div>{% endif -%}
 {%- endmacro -%}
 
 <!-- Draw result sub footer -->
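
A quick way to preview what the added template line produces (illustration only; the metadata string below is made up) is to render the snippet with Jinja2 directly:

    from jinja2 import Template

    # The same conditional added to both macros above.
    snippet = ('{%- if result.metadata %} <div class="highlight">'
               '{{ result.metadata|safe }}</div>{% endif -%}')
    result = {'metadata': 'Philosophical Magazine / doi:10.1000/example / Article (11-20)'}
    print(Template(snippet).render(result=result).strip())
    # <div class="highlight">Philosophical Magazine / doi:10.1000/example / Article (11-20)</div>

If the result carries no metadata key, the condition is false and nothing is rendered.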