searxng/searx/engines/microsoft_academic.py
# SPDX-License-Identifier: AGPL-3.0-or-later
"""
 Microsoft Academic (Science)
"""

from json import dumps, loads

from searx.utils import html_to_text
# Engine metadata shown in SearXNG's preferences / about pages.
about = {
    "website": 'https://academic.microsoft.com',
    "wikidata_id": 'Q28136779',
    "official_api_documentation": 'http://ma-graph.org/',
    "use_official_api": False,
    "require_api_key": False,
    "results": 'JSON',
}

# Engine-dependent configuration.
# NOTE(review): 'images' looks odd for an academic-paper engine — confirm
# this category is intentional (the module docstring says "Science").
categories = ['images']
paging = True
# Undocumented internal JSON search endpoint of academic.microsoft.com.
search_url = 'https://academic.microsoft.com/api/search'
# Template used to turn a paper id into a user-facing result URL.
_paper_url = 'https://academic.microsoft.com/paper/{id}/reference'


def request(query, params):
    """Build the POST request for one page of results.

    Fills in ``params`` (url, method, content-type header and a JSON body)
    and returns it, as expected by the SearXNG engine protocol.
    """
    params['url'] = search_url
    params['method'] = 'POST'
    params['headers']['content-type'] = 'application/json; charset=utf-8'
    params['data'] = dumps({
        'query': query,
        'queryExpression': '',
        'filters': [],
        'orderBy': 0,
        # zero-based result offset: page 1 -> skip 0, page 2 -> skip 10, ...
        'skip': (params['pageno'] - 1) * 10,
        'sortAscending': True,
        'take': 10,  # results per page
        'includeCitationContexts': False,
        'profileId': '',
    })

    return params
def response(resp):
    """Parse the JSON answer into a list of SearXNG result dicts.

    Each entry of the ``pr`` list is expected to carry a ``paper`` dict with
    ``dn`` (display name / title), ``id`` and optionally ``d`` (abstract).
    Entries without a title are skipped.
    """
    results = []

    response_data = loads(resp.text)
    if not response_data:
        return results

    for result in response_data.get('pr', []):
        paper = result['paper']
        # no display name means nothing usable to show as a title
        if 'dn' not in paper:
            continue

        title = paper['dn']
        content = _get_content(paper)
        url = _paper_url.format(id=paper['id'])

        results.append({
            'url': url,
            'title': html_to_text(title),
            'content': html_to_text(content),
        })

    return results
def _get_content(result):
2021-03-15 19:21:28 +00:00
if 'd' in result:
content = result['d']
2018-02-17 20:36:34 +00:00
if len(content) > 300:
return content[:300] + '...'
return content
return ''