[fix] issues reported by pylint

Fix pylint issues from commit 3d96a983 ([format.python] initial formatting of the python code)

Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>

parent 3d96a9839a
commit d84226bf63

9 changed files with 55 additions and 21 deletions
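
Most of the hunks below follow a single pattern: the formatter run in 3d96a983 (black, judging by the fmt: off / fmt: on markers further down) had moved the trailing "# NOQA # pylint: disable=unused-import" comment onto the closing parenthesis of the multi-line imports, where an inline pylint disable no longer covers the line on which unused-import is actually reported, so this commit moves the disable comment up to the opening line of each import. A minimal sketch of the idea, using stdlib imports as stand-ins rather than code from this repository:

    # unused-import is reported on the line that opens the statement, so an
    # inline disable has to sit on that same physical line to take effect.
    from os import (  # pylint: disable=unused-import
        getcwd,
        sep,
    )

    # Left on the closing parenthesis instead, as the formatter had placed it,
    # the comment no longer suppresses the warning:
    #
    # from os import (
    #     getcwd,
    #     sep,
    # )  # NOQA # pylint: disable=unused-import

The other recurring change wraps implicitly concatenated URL templates in fmt: off / fmt: on so the formatter leaves them split across lines; see the sketch after the first such hunk below.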

@@ -6,13 +6,13 @@
from urllib.parse import urlencode
from lxml import html
from json import loads
from searx.utils import match_language

from searx.utils import match_language
from searx.engines.bing import language_aliases
from searx.engines.bing import (
from searx.engines.bing import (  # pylint: disable=unused-import
    _fetch_supported_languages,
    supported_languages_url,
)  # NOQA # pylint: disable=unused-import
)

# about
about = {

@@ -34,7 +34,15 @@ number_of_results = 28

# search-url
base_url = 'https://www.bing.com/'
search_string = 'images/search' '?{query}' '&count={count}' '&first={first}' '&tsc=ImageHoverTitle'
search_string = (
    # fmt: off
    'images/search'
    '?{query}'
    '&count={count}'
    '&first={first}'
    '&tsc=ImageHoverTitle'
    # fmt: on
)
time_range_string = '&qft=+filterui:age-lt{interval}'
time_range_dict = {'day': '1440', 'week': '10080', 'month': '43200', 'year': '525600'}
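
The hunk above restores the one-URL-parameter-per-line layout of search_string that the formatter had collapsed into a single implicit concatenation; the fmt: off / fmt: on pair keeps it from being joined onto one line again. Roughly how such a template expands into a request URL, as an illustration only and not the engine's actual request() code:

    from urllib.parse import urlencode

    base_url = 'https://www.bing.com/'
    search_string = (
        # fmt: off
        'images/search'
        '?{query}'
        '&count={count}'
        '&first={first}'
        '&tsc=ImageHoverTitle'
        # fmt: on
    )

    # hypothetical values; the engine fills these in from the incoming query
    url = base_url + search_string.format(query=urlencode({'q': 'searxng'}), count=28, first=1)
    # https://www.bing.com/images/search?q=searxng&count=28&first=1&tsc=ImageHoverTitle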

@@ -6,15 +6,15 @@
from json import loads
from lxml import html
from urllib.parse import urlencode
from searx.utils import match_language

from searx.utils import match_language
from searx.engines.bing import language_aliases
from searx.engines.bing import (

from searx.engines.bing import (  # pylint: disable=unused-import
    _fetch_supported_languages,
    supported_languages_url,
)  # NOQA # pylint: disable=unused-import
)

# about
about = {
    "website": 'https://www.bing.com/videos',
    "wikidata_id": 'Q4914152',

@@ -31,7 +31,16 @@ time_range_support = True
number_of_results = 28

base_url = 'https://www.bing.com/'
search_string = 'videos/search' '?{query}' '&count={count}' '&first={first}' '&scope=video' '&FORM=QBLH'
search_string = (
    # fmt: off
    'videos/search'
    '?{query}'
    '&count={count}'
    '&first={first}'
    '&scope=video'
    '&FORM=QBLH'
    # fmt: on
)
time_range_string = '&qft=+filterui:videoage-lt{interval}'
time_range_dict = {'day': '1440', 'week': '10080', 'month': '43200', 'year': '525600'}

@@ -25,9 +25,14 @@ number_of_results = 5
# search-url
# Doku is OpenSearch compatible
base_url = 'http://localhost:8090'
search_url = '/?do=search' '&{query}'
# TODO '&startRecord={offset}'\
# TODO '&maximumRecords={limit}'\
search_url = (
    # fmt: off
    '/?do=search'
    '&{query}'
    # fmt: on
)
# TODO '&startRecord={offset}'
# TODO '&maximumRecords={limit}'


# do search-request

@@ -10,10 +10,10 @@ from lxml import html

from searx.data import WIKIDATA_UNITS
from searx.engines.duckduckgo import language_aliases
from searx.engines.duckduckgo import (
from searx.engines.duckduckgo import (  # pylint: disable=unused-import
    _fetch_supported_languages,
    supported_languages_url,
)  # NOQA # pylint: disable=unused-import
)
from searx.utils import extract_text, html_to_text, match_language, get_string_replaces_function
from searx.external_urls import get_external_url, get_earth_coordinates_url, area_to_osm_zoom

@@ -7,10 +7,10 @@ from json import loads
from urllib.parse import urlencode
from searx.exceptions import SearxEngineAPIException
from searx.engines.duckduckgo import get_region_code
from searx.engines.duckduckgo import (
from searx.engines.duckduckgo import (  # pylint: disable=unused-import
    _fetch_supported_languages,
    supported_languages_url,
)  # NOQA # pylint: disable=unused-import
)
from searx.network import get

# about

@@ -22,7 +22,14 @@ paging = False
safesearch = True

base_url = 'https://www.etools.ch'
search_path = '/searchAdvancedSubmit.do' '?query={search_term}' '&pageResults=20' '&safeSearch={safesearch}'
search_path = (
    # fmt: off
    '/searchAdvancedSubmit.do'
    '?query={search_term}'
    '&pageResults=20'
    '&safeSearch={safesearch}'
    # fmt: on
)


def request(query, params):

@@ -14,10 +14,10 @@ from searx.data import WIKIDATA_UNITS
from searx.network import post, get
from searx.utils import match_language, searx_useragent, get_string_replaces_function
from searx.external_urls import get_external_url, get_earth_coordinates_url, area_to_osm_zoom
from searx.engines.wikipedia import (
from searx.engines.wikipedia import (  # pylint: disable=unused-import
    _fetch_supported_languages,
    supported_languages_url,
)  # NOQA # pylint: disable=unused-import
)

# about
about = {

@@ -39,7 +39,12 @@ paging = True
categories = ['news']

# search-url
search_url = 'https://news.search.yahoo.com/search' '?{query}&b={offset}'
search_url = (
    # fmt: off
    'https://news.search.yahoo.com/search'
    '?{query}&b={offset}'
    # fmt: on
)

AGO_RE = re.compile(r'([0-9]+)\s*(year|month|week|day|minute|hour)')
AGO_TIMEDELTA = {

@@ -22,7 +22,7 @@ class TestWebUtils(SearxTestCase):
        self.assertEqual(webutils.highlight_content('', None), None)
        self.assertEqual(webutils.highlight_content(False, None), None)

        contents = ['<html></html>' 'not<']
        contents = ['<html></html>not<']
        for content in contents:
            self.assertEqual(webutils.highlight_content(content, None), content)
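
The test change above replaces an implicit concatenation of two adjacent string literals inside the list with the equivalent single literal; adjacent literals are joined at compile time, and pylint warns about the pattern inside a sequence because it usually indicates a missing comma (presumably the warning being addressed here). Both spellings build the same one-element list:

    assert ['<html></html>' 'not<'] == ['<html></html>not<']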