From 52ad49ccba389346e6216dc708891cbea2b4941d Mon Sep 17 00:00:00 2001
From: Thomas Pointhuber
Date: Wed, 3 Sep 2014 11:40:29 +0200
Subject: [PATCH] using general mediawiki-engine

* writing general mediawiki-engine
* using this engine for wikipedia
* using this engine for uncyclopedia
---
 searx/engines/mediawiki.py | 65 ++++++++++++++++++++++++++++++------
 searx/engines/wikipedia.py | 67 --------------------------------------
 searx/settings.yml         |  9 ++---
 3 files changed, 60 insertions(+), 81 deletions(-)
 delete mode 100644 searx/engines/wikipedia.py

diff --git a/searx/engines/mediawiki.py b/searx/engines/mediawiki.py
index f8cfb9afa..3c7fd4dcb 100644
--- a/searx/engines/mediawiki.py
+++ b/searx/engines/mediawiki.py
@@ -1,22 +1,67 @@
+## Wikipedia (Web)
+#
+# @website     http://www.wikipedia.org
+# @provide-api yes (http://www.mediawiki.org/wiki/API:Search)
+#
+# @using-api   yes
+# @results     JSON
+# @stable      yes
+# @parse       url, title
+#
+# @todo        content
+
 from json import loads
 from urllib import urlencode, quote
 
-url = 'https://en.wikipedia.org/'
-
-search_url = url + 'w/api.php?action=query&list=search&{query}&srprop=timestamp&format=json&sroffset={offset}'  # noqa
-
-number_of_results = 10
+# engine dependent config
+categories = ['general']
+language_support = True
+paging = True
+number_of_results = 1
+
+# search-url
+base_url = 'https://{language}.wikipedia.org/'
+search_url = base_url + 'w/api.php?action=query&list=search&{query}&srprop=timestamp&format=json&sroffset={offset}&srlimit={limit}'  # noqa
 
 
+# do search-request
 def request(query, params):
-    offset = (params['pageno'] - 1) * 10
+    offset = (params['pageno'] - 1) * number_of_results
+
+    if params['language'] == 'all':
+        language = 'en'
+    else:
+        language = params['language'].split('_')[0]
+
+    # write search-language back to params, required in response
+    params['language'] = language
+
     params['url'] = search_url.format(query=urlencode({'srsearch': query}),
-                                      offset=offset)
+                                      offset=offset,
+                                      limit=number_of_results,
+                                      language=language)
+
     return params
 
 
+# get response from search-request
 def response(resp):
+    results = []
+
     search_results = loads(resp.text)
-    res = search_results.get('query', {}).get('search', [])
-    return [{'url': url + 'wiki/' + quote(result['title'].replace(' ', '_').encode('utf-8')),  # noqa
-             'title': result['title']} for result in res[:int(number_of_results)]]
+
+    # return empty array if there are no results
+    if not search_results.get('query', {}).get('search'):
+        return []
+
+    # parse results
+    for result in search_results['query']['search']:
+        url = base_url.format(language=resp.search_params['language']) + 'wiki/' + quote(result['title'].replace(' ', '_').encode('utf-8'))
+
+        # append result
+        results.append({'url': url,
+                        'title': result['title'],
+                        'content': ''})
+
+    # return results
+    return results
diff --git a/searx/engines/wikipedia.py b/searx/engines/wikipedia.py
deleted file mode 100644
index ce9429776..000000000
--- a/searx/engines/wikipedia.py
+++ /dev/null
@@ -1,67 +0,0 @@
-## Wikipedia (Web)
-#
-# @website     http://www.wikipedia.org
-# @provide-api yes (http://www.mediawiki.org/wiki/API:Search)
-#
-# @using-api   yes
-# @results     JSON
-# @stable      yes
-# @parse       url, title
-#
-# @todo        content
-
-from json import loads
-from urllib import urlencode, quote
-
-# engine dependent config
-categories = ['general']
-language_support = True
-paging = True
-number_of_results = 1
-
-# search-url
-url = 'https://{language}.wikipedia.org/'
-search_url = url + 'w/api.php?action=query&list=search&{query}&srprop=timestamp&format=json&sroffset={offset}&srlimit={limit}'  # noqa
-
-
-# do search-request
-def request(query, params):
-    offset = (params['pageno'] - 1) * number_of_results
-
-    if params['language'] == 'all':
-        language = 'en'
-    else:
-        language = params['language'].split('_')[0]
-
-    # write search-language back to params, required in response
-    params['language'] = language
-
-    params['url'] = search_url.format(query=urlencode({'srsearch': query}),
-                                      offset=offset,
-                                      limit=number_of_results,
-                                      language=language)
-
-    return params
-
-
-# get response from search-request
-def response(resp):
-    results = []
-
-    search_results = loads(resp.text)
-
-    # return empty array if there are no results
-    if not search_results.get('query', {}).get('search'):
-        return []
-
-    # parse results
-    for result in search_results['query']['search']:
-        res_url = url.format(language=resp.search_params['language']) + 'wiki/' + quote(result['title'].replace(' ', '_').encode('utf-8'))
-
-        # append result
-        results.append({'url': res_url,
-                        'title': result['title'],
-                        'content': ''})
-
-    # return results
-    return results
diff --git a/searx/settings.yml b/searx/settings.yml
index 552a5f7b9..7d2a4387d 100644
--- a/searx/settings.yml
+++ b/searx/settings.yml
@@ -10,9 +10,10 @@ server:
 
 engines:
   - name : wikipedia
-    engine : wikipedia
+    engine : mediawiki
     shortcut : wp
-#    number_of_results : 1 # default is 1
+    base_url : 'https://{language}.wikipedia.org/'
+    number_of_results : 1
 
   - name : bing
     engine : bing
@@ -108,9 +109,9 @@ engines:
 # maybe in a fun category
 #  - name : uncyclopedia
 #    engine : mediawiki
-#    categories : general
 #    shortcut : unc
-#    url : https://uncyclopedia.wikia.com/
+#    base_url : https://uncyclopedia.wikia.com/
+#    number_of_results : 5
 
 # tmp suspended - too slow, too many errors
 #  - name : urbandictionary
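
For reference, a minimal sketch of a further settings.yml entry that reuses the generalized engine. The site name, shortcut and base_url below are hypothetical placeholders; only the parameter names (engine, base_url, number_of_results) come from this patch, and base_url may also contain a {language} placeholder as in the wikipedia entry above:

  - name : examplewiki                       # hypothetical entry, not part of this patch
    engine : mediawiki                       # generalized engine introduced above
    shortcut : exw                           # hypothetical shortcut
    base_url : 'https://wiki.example.org/'   # trailing slash needed: the engine appends w/api.php?... and wiki/<title>
    number_of_results : 5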