[feat] hostname replace plugin: possibility to prioritize certain websites
Co-authored-by: Markus Heiser <markus.heiser@darmarit.de>
parent d4c3d30995
commit 3bec04079c
7 changed files with 117 additions and 76 deletions
@@ -96,7 +96,7 @@ Modify the ``/etc/searxng/settings.yml`` to your needs:

 .. literalinclude:: ../../utils/templates/etc/searxng/settings.yml
    :language: yaml
-   :end-before: # hostname_replace:
+   :end-before: # hostnames:

 To see the entire file jump to :origin:`utils/templates/etc/searxng/settings.yml`

@@ -104,7 +104,7 @@ Modify the ``/etc/searxng/settings.yml`` to your needs:

 .. literalinclude:: ../../searx/settings.yml
    :language: yaml
-   :end-before: # hostname_replace:
+   :end-before: # hostnames:

 To see the entire file jump to :origin:`searx/settings.yml`

@@ -103,14 +103,14 @@ Parameters

    .. disabled by default

-   ``Hostname_replace``, ``Open_Access_DOI_rewrite``,
+   ``Hostnames_plugin``, ``Open_Access_DOI_rewrite``,
    ``Vim-like_hotkeys``, ``Tor_check_plugin``

 ``disabled_plugins``: optional
   List of disabled plugins.

   :default:
-    ``Hostname_replace``, ``Open_Access_DOI_rewrite``,
+    ``Hostnames_plugin``, ``Open_Access_DOI_rewrite``,
     ``Vim-like_hotkeys``, ``Tor_check_plugin``

   :values:
searx/plugins/hostname_replace.py (deleted file, 49 lines)

@@ -1,49 +0,0 @@
-# SPDX-License-Identifier: AGPL-3.0-or-later
-# pylint: disable=missing-module-docstring
-
-import re
-from urllib.parse import urlunparse, urlparse
-
-from flask_babel import gettext
-
-from searx import settings
-from searx.plugins import logger
-
-name = gettext('Hostname replace')
-description = gettext('Rewrite result hostnames or remove results based on the hostname')
-default_on = False
-preference_section = 'general'
-
-plugin_id = 'hostname_replace'
-
-replacements = {re.compile(p): r for (p, r) in settings[plugin_id].items()} if plugin_id in settings else {}
-
-logger = logger.getChild(plugin_id)
-parsed = 'parsed_url'
-_url_fields = ['iframe_src', 'audio_src']
-
-
-def on_result(_request, _search, result):
-
-    for pattern, replacement in replacements.items():
-
-        if parsed in result:
-            if pattern.search(result[parsed].netloc):
-                # to keep or remove this result from the result list depends
-                # (only) on the 'parsed_url'
-                if not replacement:
-                    return False
-                result[parsed] = result[parsed]._replace(netloc=pattern.sub(replacement, result[parsed].netloc))
-                result['url'] = urlunparse(result[parsed])
-
-        for url_field in _url_fields:
-            if result.get(url_field):
-                url_src = urlparse(result[url_field])
-                if pattern.search(url_src.netloc):
-                    if not replacement:
-                        del result[url_field]
-                    else:
-                        url_src = url_src._replace(netloc=pattern.sub(replacement, url_src.netloc))
-                        result[url_field] = urlunparse(url_src)
-
-    return True
searx/plugins/hostnames.py (new file, 73 lines)
@@ -0,0 +1,73 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# pylint: disable=missing-module-docstring
+
+import re
+from urllib.parse import urlunparse, urlparse
+
+from flask_babel import gettext
+
+from searx import settings
+from searx.plugins import logger
+
+name = gettext('Hostnames plugin')
+description = gettext('Rewrite hostnames, remove results or prioritize them based on the hostname')
+default_on = False
+preference_section = 'general'
+
+plugin_id = 'hostnames'
+
+replacements = {
+    re.compile(p): r
+    for (p, r) in (settings.get(plugin_id, {}).get('replace', settings.get('hostname_replace', {})).items())
+}
+removables = {re.compile(p) for p in settings[plugin_id].get('remove', [])}
+high_priority = {re.compile(p) for p in settings[plugin_id].get('high_priority', [])}
+low_priority = {re.compile(p) for p in settings[plugin_id].get('low_priority', [])}
+
+logger = logger.getChild(plugin_id)
+parsed = 'parsed_url'
+_url_fields = ['iframe_src', 'audio_src']
+
+
+def _matches_parsed_url(result, pattern):
+    return parsed in result and pattern.search(result[parsed].netloc)
+
+
+def on_result(_request, _search, result):
+    for pattern, replacement in replacements.items():
+        if _matches_parsed_url(result, pattern):
+            logger.debug(result['url'])
+            result[parsed] = result[parsed]._replace(netloc=pattern.sub(replacement, result[parsed].netloc))
+            result['url'] = urlunparse(result[parsed])
+            logger.debug(result['url'])
+
+        for url_field in _url_fields:
+            if not result.get(url_field):
+                continue
+
+            url_src = urlparse(result[url_field])
+            if pattern.search(url_src.netloc):
+                url_src = url_src._replace(netloc=pattern.sub(replacement, url_src.netloc))
+                result[url_field] = urlunparse(url_src)
+
+    for pattern in removables:
+        if _matches_parsed_url(result, pattern):
+            return False
+
+        for url_field in _url_fields:
+            if not result.get(url_field):
+                continue
+
+            url_src = urlparse(result[url_field])
+            if pattern.search(url_src.netloc):
+                del result[url_field]
+
+    for pattern in low_priority:
+        if _matches_parsed_url(result, pattern):
+            result['priority'] = 'low'
+
+    for pattern in high_priority:
+        if _matches_parsed_url(result, pattern):
+            result['priority'] = 'high'
+
+    return True
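For orientation, here is a minimal standalone sketch of the matching behaviour the new plugin applies to each result. The patterns, the apply_hostnames helper and example_result below are illustrative assumptions for this note only; the real plugin reads its patterns from settings['hostnames'] and operates on result['parsed_url'] plus the extra URL fields shown above.

# Illustrative sketch, not the plugin itself: regexes are matched against the
# hostname (netloc) of a result URL to rewrite, drop or prioritize the result.
import re
from urllib.parse import urlparse, urlunparse

replacements = {re.compile(r'(www\.)?twitter\.com$'): 'nitter.example.com'}
removables = {re.compile(r'(.*\.)?facebook\.com$')}
low_priority = {re.compile(r'(.*\.)?google(\..*)?$')}
high_priority = {re.compile(r'(.*\.)?wikipedia\.org$')}

def apply_hostnames(result):
    """Return False to drop the result, otherwise mutate it in place."""
    url = urlparse(result['url'])

    for pattern, replacement in replacements.items():
        if pattern.search(url.netloc):
            url = url._replace(netloc=pattern.sub(replacement, url.netloc))
            result['url'] = urlunparse(url)

    if any(p.search(url.netloc) for p in removables):
        return False
    if any(p.search(url.netloc) for p in low_priority):
        result['priority'] = 'low'
    if any(p.search(url.netloc) for p in high_priority):
        result['priority'] = 'high'
    return True

example_result = {'url': 'https://www.twitter.com/searxng'}
print(apply_hostnames(example_result), example_result)
# True {'url': 'https://nitter.example.com/searxng'}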
@@ -130,16 +130,25 @@ def merge_two_infoboxes(infobox1, infobox2):  # pylint: disable=too-many-branches
         infobox1['content'] = content2


-def result_score(result):
+def result_score(result, priority):
     weight = 1.0

     for result_engine in result['engines']:
         if hasattr(engines[result_engine], 'weight'):
             weight *= float(engines[result_engine].weight)

-    occurrences = len(result['positions'])
+    weight *= len(result['positions'])
+    score = 0

-    return sum((occurrences * weight) / position for position in result['positions'])
+    for position in result['positions']:
+        if priority == 'low':
+            continue
+        if priority == 'high':
+            score += weight
+        else:
+            score += weight / position
+
+    return score


 class Timing(NamedTuple):  # pylint: disable=missing-class-docstring

@@ -354,9 +363,7 @@ class ResultContainer:
         self._closed = True

         for result in self._merged_results:
-            score = result_score(result)
-            result['score'] = score
-
+            result['score'] = result_score(result, result.get('priority'))
             # removing html content and whitespace duplications
             if result.get('content'):
                 result['content'] = utils.html_to_text(result['content']).strip()

@@ -364,7 +371,7 @@ class ResultContainer:
                 result['title'] = ' '.join(utils.html_to_text(result['title']).strip().split())

             for result_engine in result['engines']:
-                counter_add(score, 'engine', result_engine, 'score')
+                counter_add(result['score'], 'engine', result_engine, 'score')

         results = sorted(self._merged_results, key=itemgetter('score'), reverse=True)
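A small worked example of the new scoring rule (a simplified standalone re-implementation for illustration; the positions, weight and printed values are assumed, not taken from the commit). A high priority result is no longer discounted by its position, and a low priority result scores 0, so it sorts to the bottom of the score-ordered list.

# Simplified, standalone version of result_score for illustration only.
def result_score(positions, priority, engine_weight=1.0):
    weight = engine_weight * len(positions)   # scaled by number of occurrences
    score = 0
    for position in positions:
        if priority == 'low':
            continue                          # low priority: contributes nothing
        if priority == 'high':
            score += weight                   # high priority: no position discount
        else:
            score += weight / position        # default: harmonic position weighting
    return score

print(result_score([1, 2], None))    # 2.0 + 1.0 = 3.0
print(result_score([1, 2], 'high'))  # 2.0 + 2.0 = 4.0
print(result_score([1, 2], 'low'))   # 0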
@@ -219,7 +219,7 @@ outgoing:
 #   - 'Tracker URL remover'
 #   - 'Ahmia blacklist'  # activation depends on outgoing.using_tor_proxy
 #   # these plugins are disabled if nothing is configured ..
-#   - 'Hostname replace'  # see hostname_replace configuration below
+#   - 'Hostnames plugin'  # see 'hostnames' configuration below
 #   - 'Basic Calculator'
 #   - 'Open Access DOI rewrite'
 #   - 'Tor check plugin'

@@ -228,17 +228,21 @@ outgoing:
 #   # preferences if they want.
 #   - 'Autodetect search language'

-# Configuration of the "Hostname replace" plugin:
+# Configuration of the "Hostnames plugin":
 #
-# hostname_replace:
+# hostnames:
+#   replace:
 #     '(.*\.)?youtube\.com$': 'invidious.example.com'
 #     '(.*\.)?youtu\.be$': 'invidious.example.com'
-#     '(.*\.)?youtube-noocookie\.com$': 'yotter.example.com'
 #     '(.*\.)?reddit\.com$': 'teddit.example.com'
 #     '(.*\.)?redd\.it$': 'teddit.example.com'
 #     '(www\.)?twitter\.com$': 'nitter.example.com'
-#   # to remove matching host names from result list, set value to false
-#     'spam\.example\.com': false
+#   remove:
+#     - '(.*\.)?facebook.com$'
+#   low_priority:
+#     - '(.*\.)?google(\..*)?$'
+#   high_priority:
+#     - '(.*\.)?wikipedia.org$'

 checker:
   # disable checker when in debug mode
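Note that the keys and list entries in the hostnames: example above are Python regular expressions matched against a result's hostname (netloc), not glob patterns. A quick illustrative check using the youtube pattern from the example:

import re

pattern = re.compile(r'(.*\.)?youtube\.com$')
assert pattern.search('youtube.com')
assert pattern.search('www.youtube.com')
assert not pattern.search('notyoutube.org')
print('youtube pattern checks passed')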
@@ -36,16 +36,22 @@ enabled_plugins:
   - 'Self Informations'
   - 'Tracker URL remover'
   - 'Ahmia blacklist'
-#  - 'Hostname replace'  # see hostname_replace configuration below
+#  - 'Hostnames plugin'  # see 'hostnames' configuration below
 #  - 'Open Access DOI rewrite'

 # plugins:
 #   - only_show_green_results

-# hostname_replace:
-#
-#   # twitter --> nitter
-#   '(www\.)?twitter\.com$': 'nitter.net'
+# hostnames:
+#   replace:
+#     '(.*\.)?youtube\.com$': 'invidious.example.com'
+#     '(.*\.)?youtu\.be$': 'invidious.example.com'
+#   remove:
+#     - '(.*\.)?facebook.com$'
+#   low_priority:
+#     - '(.*\.)?google\.com$'
+#   high_priority:
+#     - '(.*\.)?wikipedia.org$'

 engines:
