Merge branch 'master' into docker/opencontainers

Mathieu Brunot 2020-01-14 15:43:43 +01:00 committed by GitHub
commit 24472ce718
34 changed files with 2815 additions and 121 deletions


@ -8,6 +8,9 @@ PYOBJECTS = searx
DOC = docs
PY_SETUP_EXTRAS ?= \[test\]
PYDIST=./dist/py
PYBUILD=./build/py
include utils/makefile.include
include utils/makefile.python
include utils/makefile.sphinx
@ -23,6 +26,7 @@ help:
@echo ' install - developer install (./local)'
@echo ' uninstall - uninstall (./local)'
@echo ' gh-pages - build docs & deploy on gh-pages branch'
@echo ' clean - drop builds and environments'
@echo ''
@$(MAKE) -s -f utils/makefile.include make-help
@echo ''


@ -24,7 +24,107 @@ p.sidebar-title, .sidebar p {
margin: 6pt;
}
.sidebar li {
.sidebar li,
.hlist li {
list-style-type: disclosure-closed;
}
/* admonitions
*/
div.admonition, div.topic {
background-color: #fafafa;
margin: 8px 0px;
padding: 1em;
border-radius: 3pt 0 0 3pt;
border-top: none;
border-right: none;
border-bottom: none;
border-left: 5pt solid #ccc;
}
p.admonition-title:after {
content: none;
}
.admonition.hint { border-color: #416dc0b0; }
.admonition.note { border-color: #6c856cb0; }
.admonition.tip { border-color: #85c5c2b0; }
.admonition.attention { border-color: #ecec97b0; }
.admonition.caution { border-color: #a6c677b0; }
.admonition.danger { border-color: #d46262b0; }
.admonition.important { border-color: #dfa3a3b0; }
.admonition.error { border-color: red; }
.admonition.warning { border-color: darkred; }
.admonition.admonition-generic-admonition-title {
border-color: #416dc0b0;
}
/* admonitions with (rendered) reST markup examples (:class: rst-example)
*
* .. admonition:: title of the example
* :class: rst-example
* ....
*/
div.rst-example {
background-color: inherit;
margin: 0;
border-top: none;
border-right: 1px solid #ccc;
border-bottom: none;
border-left: none;
border-radius: none;
padding: 0;
}
div.rst-example > p.admonition-title {
font-family: Sans Serif;
font-style: italic;
font-size: 0.8em;
display: block;
border-bottom: 1px solid #ccc;
padding: 0.5em 1em;
text-align: right;
}
/* code block in figures
*/
div.highlight pre {
text-align: left;
}
/* Table theme
*/
thead, tfoot {
background-color: #fff;
}
th:hover, td:hover {
background-color: #ffc;
}
thead th, tfoot th, tfoot td, tbody th {
background-color: #fffaef;
}
tbody tr:nth-child(odd) {
background-color: #fff;
}
tbody tr:nth-child(even) {
background-color: #fafafa;
}
caption {
font-family: Sans Serif;
padding: 0.5em;
margin: 0.5em 0 0.5em 0;
caption-side: top;
text-align: left;
}


@ -0,0 +1,33 @@
digraph G {
node [style=filled, shape=box, fillcolor="#ffffcc", fontname="Sans"];
edge [fontname="Sans"];
browser [label="Browser", shape=Mdiamond];
rp [label="Reverse Proxy", href="url to configure reverse proxy"];
filtron [label="Filtron", href="https://github.com/asciimoo/filtron"];
morty [label="Morty", href="https://github.com/asciimoo/morty"];
static [label="Static files", href="url to configure static files"];
uwsgi [label="uwsgi", href="url to configure uwsgi"]
searx1 [label="Searx #1"];
searx2 [label="Searx #2"];
searx3 [label="Searx #3"];
searx4 [label="Searx #4"];
browser -> rp [label="HTTPS"]
subgraph cluster_searx {
label = "Searx instance" fontname="Sans";
bgcolor="#fafafa";
{ rank=same; static rp };
rp -> morty [label="optional: images and HTML pages proxy"];
rp -> static [label="optional: reverse proxy serves directly static files"];
rp -> filtron [label="HTTP"];
filtron -> uwsgi [label="HTTP"];
uwsgi -> searx1;
uwsgi -> searx2;
uwsgi -> searx3;
uwsgi -> searx4;
}
}


@ -0,0 +1,24 @@
.. _architecture:
============
Architecture
============
.. sidebar:: Needs work!
This article needs some work / Searx is a collaborative effort. If you have
any contribution, feel welcome to send us your :pull:`PR <../pulls>`, see
:ref:`how to contribute`.
Herein you will find some hints and suggestions about typical architectures of
searx infrastructures.
We start with a contribution from :pull:`@dalf <1776#issuecomment-567917320>`.
It shows a *reference* setup for public searx instances.
.. _arch public:
.. kernel-figure:: arch_public.dot
:alt: arch_public.dot
Reference architecture of a public searx setup.

docs/admin/buildhosts.rst

@ -0,0 +1,103 @@
.. _buildhosts:
==========
Buildhosts
==========
.. sidebar:: This article needs some work
If you have any contribution send us your :pull:`PR <../pulls>`, see
:ref:`how to contribute`.
To get the best results from a build, it is recommended to install additional packages
on build hosts.
.. _docs build:
Build docs
==========
.. _Graphviz: https://graphviz.gitlab.io
.. _ImageMagick: https://www.imagemagick.org
.. _XeTeX: https://tug.org/xetex/
.. _dvisvgm: https://dvisvgm.de/
.. sidebar:: Sphinx build needs
- ImageMagick_
- Graphviz_
- XeTeX_
- dvisvgm_
Most of the sphinx requirements are installed from :origin:`setup.py` and the
docs can be built from scratch with ``make docs``. For better math and image
processing, additional packages are needed. XeTeX_ is needed not only for PDF
creation, it is also needed for :ref:`math` when HTML output is built.
To be able to do :ref:`sphinx:math-support` without CDNs, the math is rendered
as images (``sphinx.ext.imgmath`` extension). If your docs build (``make
docs``) shows warnings like this::
WARNING: dot(1) not found, for better output quality install \
graphviz from http://www.graphviz.org
..
WARNING: LaTeX command 'latex' cannot be run (needed for math \
display), check the imgmath_latex setting
you need to install additional packages on your build host, to get better HTML
output.
.. _system requirements:
.. tabs::
.. group-tab:: Ubuntu / debian
.. code-block:: sh
$ sudo apt install graphviz imagemagick texlive-xetex librsvg2-bin
.. group-tab:: Arch Linux
.. code-block:: sh
$ sudo pacman -S graphviz imagemagick texlive-bin extra/librsvg
.. group-tab:: Fedora / RHEL
.. code-block:: sh
$ sudo dnf install graphviz graphviz-gd texlive-xetex-bin librsvg2-tools
For PDF output you also need:
.. tabs::
.. group-tab:: Ubuntu / debian
.. code:: sh
$ sudo apt install texlive-latex-recommended texlive-extra-utils ttf-dejavu
.. group-tab:: Arch Linux
.. code:: sh
$ sudo pacman -S texlive-core texlive-latexextra ttf-dejavu
.. group-tab:: Fedora / RHEL
.. code:: sh
$ sudo dnf install \
texlive-collection-fontsrecommended texlive-collection-latex \
dejavu-sans-fonts dejavu-serif-fonts dejavu-sans-mono-fonts
.. _system requirements END:
.. literalinclude:: ../conf.py
:language: python
:start-after: # sphinx.ext.imgmath setup
:end-before: # sphinx.ext.imgmath setup END

docs/admin/engines.rst

@ -0,0 +1,71 @@
.. _engines generic:
=======
Engines
=======
.. sidebar:: Further reading ..
- :ref:`settings engine`
- :ref:`engine settings`
- :ref:`engine file`
============= =========== ==================== ============
:ref:`engine settings`    :ref:`engine file`
------------------------- ---------------------------------
Name (cfg)                Categories
------------------------- ---------------------------------
Engine        ..          Paging support       **P**
------------------------- -------------------- ------------
Shortcut      **S**       Language support     **L**
Timeout       **TO**      Time range support   **TR**
Disabled      **D**       Offline              **O**
------------- ----------- -------------------- ------------
Safe search   **SS**
------------- ----------- ---------------------------------
Weight        **W**
------------- ----------- ---------------------------------
Disabled      **D**
============= =========== =================================
Configuration defaults (at build time):
.. _configured engines:
.. jinja:: webapp
.. flat-table:: Engines configured at build time (defaults)
:header-rows: 1
:stub-columns: 2
* - Name (cfg)
- S
- Engine
- TO
- Categories
- P
- L
- SS
- D
- TR
- O
- W
- D
{% for name, mod in engines.items() %}
* - {{name}}
- !{{mod.shortcut}}
- {{mod.__name__}}
- {{mod.timeout}}
- {{", ".join(mod.categories)}}
- {{(mod.paging and "y") or ""}}
- {{(mod.language_support and "y") or ""}}
- {{(mod.safesearch and "y") or ""}}
- {{(mod.disabled and "y") or ""}}
- {{(mod.time_range_support and "y") or ""}}
- {{(mod.offline and "y") or ""}}
- {{mod.weight or 1 }}
- {{(mod.disabled and "y") or ""}}
{% endfor %}


@ -6,6 +6,11 @@ Administrator documentation
:maxdepth: 1
installation
settings
api
architecture
filtron
morty
engines
plugins
buildhosts


@ -114,6 +114,9 @@ content:
# Module to import
module = searx.webapp
# Support running the module from a webserver subdirectory.
route-run = fixpathinfo:
# Virtualenv and python path
virtualenv = /usr/local/searx/searx-ve/
pythonpath = /usr/local/searx/
@ -151,7 +154,10 @@ content:
server {
listen 80;
server_name searx.example.com;
root /usr/local/searx;
root /usr/local/searx/searx;
location /static {
}
location / {
include uwsgi_params;
@ -180,14 +186,13 @@ Add this configuration in the server config file
.. code:: nginx
location = /searx { rewrite ^ /searx/; }
location /searx {
try_files $uri @searx;
location /searx/static {
alias /usr/local/searx/searx/static;
}
location @searx {
location /searx {
uwsgi_param SCRIPT_NAME /searx;
include uwsgi_params;
uwsgi_modifier1 30;
uwsgi_pass unix:/run/uwsgi/app/searx/socket;
}
@ -197,6 +202,10 @@ in case of single-user or low-traffic instances.)
.. code:: nginx
location /searx/static {
alias /usr/local/searx/searx/static;
}
location /searx {
proxy_pass http://127.0.0.1:8888;
proxy_set_header Host $host;
@ -338,4 +347,3 @@ References
* How to: `Setup searx in a couple of hours with a free SSL certificate
<https://www.reddit.com/r/privacytoolsIO/comments/366kvn/how_to_setup_your_own_privacy_respecting_search/>`__

docs/admin/plugins.rst

@ -0,0 +1,39 @@
.. _plugins generic:
===============
Plugins builtin
===============
.. sidebar:: Further reading ..
- :ref:`dev plugin`
Configuration defaults (at build time):
:DO: Default on
.. _configured plugins:
.. jinja:: webapp
.. flat-table:: Plugins configured at build time (defaults)
:header-rows: 1
:stub-columns: 1
:widths: 3 1 9
* - Name
- DO
- Description
JS & CSS dependencies
{% for plgin in plugins %}
* - {{plgin.name}}
- {{(plgin.default_on and "y") or ""}}
- {{plgin.description}}
{% for dep in (plgin.js_dependencies + plgin.css_dependencies) %}
| ``{{dep}}`` {% endfor %}
{% endfor %}
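
Each builtin plugin provides (at least) the attributes that the table template
above reads: ``name``, ``default_on``, ``description``, ``js_dependencies`` and
``css_dependencies``. A minimal sketch with placeholder values (attribute names
taken from the template above, everything else is illustrative only):

.. code:: python

   # Placeholder plugin module; the attribute names match the columns above,
   # the values are illustrative only.
   name = 'My plugin'                 # Name column
   description = 'What the plugin does and when it is useful'
   default_on = False                 # DO column: enabled by default?
   js_dependencies = ()               # JS files the plugin injects
   css_dependencies = ()              # CSS files the plugin injects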

docs/admin/settings.rst

@ -0,0 +1,181 @@
.. _settings.yml:
================
``settings.yml``
================
.. sidebar:: Further reading ..
- :ref:`search API`
This page describes the options available in the settings.yml file.
.. _settings global:
Global Settings
===============
.. code:: yaml
server:
port : 8888
secret_key : "ultrasecretkey" # change this!
debug : False # debug mode, only for development
request_timeout : 2.0 # seconds
base_url : False # set custom base_url (or False)
themes_path : "" # custom ui themes path
default_theme : oscar # ui theme
useragent_suffix : "" # suffix of searx_useragent, could contain
# informations like admins email address
image_proxy : False # proxying image results through searx
default_locale : "" # default interface locale
# uncomment below section if you want to use a proxy
#outgoing_proxies :
# http : http://127.0.0.1:8080
# https: http://127.0.0.1:8080
# uncomment below section only if you have more than one network interface
# which can be the source of outgoing search requests
#source_ips:
# - 1.1.1.1
# - 1.1.1.2
locales:
en : English
de : Deutsch
he : Hebrew
hu : Magyar
fr : Français
es : Español
it : Italiano
nl : Nederlands
ja : 日本語 (Japanese)
tr : Türkçe
ru : Russian
ro : Romanian
``port`` :
Port number of the searx web application if you run it directly using ``python
searx/webapp.py``. Doesn't apply to searx running on Apache or Nginx.
``secret_key`` :
Used for cryptographic purposes; a sketch for generating a random value follows these option descriptions.
``debug`` :
Allow a more detailed log if you run searx directly. Display *detailed* error
messages in the browser too, so this must be deactivated in production.
``request_timeout`` :
Global timeout of the requests made to other engines, in seconds. A bigger
timeout allows waiting for answers from slow engines, but in consequence slows
down searx's reactivity (the result page may take up to the specified timeout
to load).
``base_url`` :
The base URL where searx is deployed. Used to create correct inbound links.
``themes_path`` :
Path to where the themes are located. If you didn't develop anything, leave it
blank.
``default_theme`` :
Name of the theme you want to use by default on your searx instance.
``useragent_suffix`` :
Suffix to the user agent searx uses to send requests to other engines. If an
engine wishes to block you, contact information here may help to avoid that.
``image_proxy`` :
Allow your instance of searx to proxy image results. Uses memory space.
``default_locale`` :
Searx interface language. If blank, the locale is detected from the browser
language. If that doesn't work, or you are deploying a language-specific
instance of searx, a locale can be defined using an ISO language code, like
``fr``, ``en``, ``de``.
.. _requests proxies: http://docs.python-requests.org/en/latest/user/advanced/#proxies
.. _PR SOCKS support: https://github.com/kennethreitz/requests/pull/478
``outgoing_proxies`` :
Define a proxy you wish to use, see `requests proxies`_. SOCKS proxies are
not supported, see `PR SOCKS support`_.
``source_ips`` :
If you use multiple network interfaces, define from which IP the requests must
be made.
``locales`` :
Locale codes and their names. Available translations of the searx interface.
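
One possible way (not prescribed by searx) to generate a random value for
``secret_key``; any sufficiently long random string works:

.. code:: python

   # Print a random hex string that can be pasted into settings.yml as the
   # ``secret_key`` value.  Using the ``secrets`` module is only a suggestion,
   # searx does not mandate any particular method.
   import secrets
   print(secrets.token_hex(32))
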
.. _settings engine:
Engine settings
===============
.. sidebar:: Further reading ..
- :ref:`engines-dev`
.. code:: yaml
- name : bing
engine : bing
shortcut : bi
base_url : 'https://{language}.wikipedia.org/'
categories : general
timeout : 3.0
api_key : 'apikey'
disabled : True
language : en_US
``name`` :
Name that will be used across searx to define this engine. In settings, on
the result page...
``engine`` :
Name of the python file used to handle requests and responses to and from this
search engine.
``shortcut`` :
Code used to execute bang requests (in this case using ``!bi`` or ``?bi``)
``base_url`` : optional
Part of the URL that should be stable across every request. Can be useful for
using multiple sites with only one engine, or for updating the site URL without
touching the code.
``categories`` : optional
Define in which categories this engine will be active. Most of the time, it is
defined in the code of the engine, but in a few cases it is useful, like when
describing multiple search engines using the same code.
``timeout`` : optional
Timeout of the search with the current search engine. **Be careful, it will
modify the global timeout of searx.**
``api_key`` : optional
In a few cases, using an API requires a secret key. How to obtain it
is described in the engine file.
``disabled`` : optional
Disable the engine by default, without deleting it. The user can still
manually activate it in the settings.
``language`` : optional
If you want to use another language for a specific engine, you can define it
by using the full ISO code of language and country, like ``fr_FR``, ``en_US``,
``de_DE``.
``weight`` : default ``1``
Weighting of the results of this engine.
.. note::
A few more options are possible, but they are pretty specific to some
engines, and so won't be described here.


@ -14,11 +14,18 @@ project = u'searx'
copyright = u'2015-2019, Adam Tauber, Noémi Ványi'
author = u'Adam Tauber'
release, version = VERSION_STRING, VERSION_STRING
highlight_language = 'none'
# General --------------------------------------------------------------
master_doc = "index"
source_suffix = '.rst'
numfig = True
from searx import webapp
jinja_contexts = {
'webapp': dict(**webapp.__dict__)
}
# usage:: lorem :patch:`f373169` ipsum
extlinks = {}
@ -32,21 +39,40 @@ extlinks['origin'] = (GIT_URL + '/blob/master/%s', 'git://')
extlinks['patch'] = (GIT_URL + '/commit/%s', '#')
extlinks['search'] = (SEARX_URL + '/%s', '#')
extlinks['docs'] = (DOCS_URL + '/%s', 'docs: ')
extlinks['pypi'] = ('https://pypi.org/project/%s', 'PyPi: ')
extlinks['man'] = ('https://manpages.debian.org/jump?q=%s', '')
#extlinks['role'] = (
# 'https://www.sphinx-doc.org/en/master/usage/restructuredtext/roles.html#role-%s', '')
extlinks['duref'] = (
'http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html#%s', '')
extlinks['durole'] = (
'http://docutils.sourceforge.net/docs/ref/rst/roles.html#%s', '')
extlinks['dudir'] = (
'http://docutils.sourceforge.net/docs/ref/rst/directives.html#%s', '')
extlinks['ctan'] = (
'https://ctan.org/pkg/%s', 'CTAN: ')
extensions = [
'sphinx.ext.imgmath',
'sphinx.ext.extlinks',
'sphinx.ext.viewcode',
"sphinx.ext.autodoc",
"sphinx.ext.intersphinx",
"pallets_sphinx_themes",
"sphinx_issues", # https://github.com/sloria/sphinx-issues/blob/master/README.rst
"sphinxcontrib.jinja", # https://github.com/tardyp/sphinx-jinja
'linuxdoc.rstFlatTable', # Implementation of the 'flat-table' reST-directive.
'linuxdoc.kfigure', # Sphinx extension which implements scalable image handling.
"sphinx_tabs.tabs", # https://github.com/djungelorm/sphinx-tabs
]
intersphinx_mapping = {
"python": ("https://docs.python.org/3/", None),
# "flask": ("https://flask.palletsprojects.com/", None),
"flask": ("https://flask.palletsprojects.com/", None),
# "werkzeug": ("https://werkzeug.palletsprojects.com/", None),
# "jinja": ("https://jinja.palletsprojects.com/", None),
"jinja": ("https://jinja.palletsprojects.com/", None),
"linuxdoc" : ("https://return42.github.io/linuxdoc/", None),
"sphinx" : ("https://www.sphinx-doc.org/en/master/", None),
}
issues_github_path = "asciimoo/searx"
@ -54,16 +80,21 @@ issues_github_path = "asciimoo/searx"
# HTML -----------------------------------------------------------------
sys.path.append(os.path.abspath('_themes'))
html_theme_path = ['_themes']
html_theme = "searx"
# sphinx.ext.imgmath setup
html_math_renderer = 'imgmath'
imgmath_image_format = 'svg'
imgmath_font_size = 14
# sphinx.ext.imgmath setup END
html_theme_options = {"index_sidebar_logo": True}
html_context = {
"project_links": [
ProjectLink("Source", GIT_URL),
ProjectLink("Wiki", "https://github.com/asciimoo/searx/wiki"),
ProjectLink("Public instances", "https://github.com/asciimoo/searx/wiki/Searx-instances"),
ProjectLink("Public instances", "https://asciimoo.github.io/searx/user/public_instances.html"),
ProjectLink("Twitter", "https://twitter.com/Searx_engine"),
]
}


@ -1,3 +1,5 @@
.. _how to contribute:
=================
How to contribute
=================
@ -48,7 +50,20 @@ Code
====
.. _PEP8: https://www.python.org/dev/peps/pep-0008/
.. _Conventional Commits: https://www.conventionalcommits.org/
.. _Git Commit Good Practice: https://wiki.openstack.org/wiki/GitCommitMessages
.. _Structural split of changes:
https://wiki.openstack.org/wiki/GitCommitMessages#Structural_split_of_changes
.. _gitmoji: https://gitmoji.carloscuesta.me/
.. _Semantic PR: https://github.com/zeke/semantic-pull-requests
.. sidebar:: Create good commits!
- `Structural split of changes`_
- `Conventional Commits`_
- `Git Commit Good Practice`_
- some like to use: gitmoji_
- not yet active: `Semantic PR`_
In order to submit a patch, please follow the steps below:
@ -57,6 +72,9 @@ In order to submit a patch, please follow the steps below:
- PEP8_ standards apply, except the convention of line length
- Maximum line length is 120 characters
- The cardinal rule for creating good commits is to ensure there is only one
*logical change* per commit / read `Structural split of changes`_
- Check if your code breaks existing tests. If so, update the tests or fix your
code.
@ -64,6 +82,16 @@ In order to submit a patch, please follow the steps below:
- Add yourself to the :origin:`AUTHORS.rst` file.
- Choose meaningful commit messages, read `Conventional Commits`_
.. code::
<type>[optional scope]: <description>
[optional body]
[optional footer(s)]
- Create a pull request.
For more help on getting started with searx development, see :ref:`devquickstart`.
@ -79,6 +107,8 @@ Translation currently takes place on :ref:`transifex <translation>`.
Please, do not update translation files in the repo.
.. _contrib docs:
Documentation
=============
@ -91,7 +121,7 @@ Documentation
The documentation is built using Sphinx_. So in order to be able to generate
the required files, you have to install it on your system. Much easier, use
Makefile our targets.
our :ref:`makefile`.
Here is an example which makes a complete rebuild:
@ -101,6 +131,7 @@ Here is an example which makes a complete rebuild:
...
The HTML pages are in dist/docs.
.. _make docs-live:
live build
----------
@ -110,9 +141,10 @@ live build
It is recommended to assert a complete rebuild before deploying (use
``docs-clean``).
Live build is like WYSIWYG, If you want to edit the documentation, its
recommended to use. The Makefile target ``docs-live`` builds the docs, opens URL
in your favorite browser and rebuilds every time a reST file has been changed.
Live build is like WYSIWYG. If you want to edit the documentation, it is
recommended to use it. The Makefile target ``docs-live`` builds the docs, opens
the URL in your favorite browser and rebuilds every time a reST file is
changed.
.. code:: sh
@ -123,12 +155,13 @@ in your favorite browser and rebuilds every time a reST file has been changed.
... Start watching changes
.. _deploy on github.io:
deploy on github.io
-------------------
To deploy documentation at :docs:`github.io <.>` use Makefile target
``gh-pages``, which will builds the documentation, clones searx into a sub
:ref:`make gh-pages`, which builds the documentation, clones searx into a sub
folder ``gh-pages``, cleans it, copies the doc build into it and runs all the
needed git add, commit and push:

docs/dev/csv_table.txt

@ -0,0 +1,6 @@
stub col row 1, column, "loremLorem ipsum dolor sit amet, consetetur sadipscing elitr, sed diam nonumy
eirmod tempor invidunt ut labore et dolore magna aliquyam erat, sed diam
voluptua."
stub col row 1, "At vero eos et accusam et justo duo dolores et ea rebum. Stet clita
kasd gubergren, no sea takimata sanctus est Lorem ipsum dolor sit amet.", column
stub col row 1, column, column


@ -29,6 +29,7 @@ the ones in the engine file.
It does not matter if an option is stored in the engine file or in the
settings. However, the standard way is the following:
.. _engine file:
engine file
-----------
@ -43,6 +44,7 @@ time_range_support boolean support search time range
offline boolean engine runs offline
======================= =========== ===========================================
.. _engine settings:
settings.yml
------------

docs/dev/hello.dot

@ -0,0 +1,3 @@
graph G {
Hello -- World
}


@ -11,3 +11,5 @@ Developer documentation
search_api
plugins
translation
makefile
reST

docs/dev/makefile.rst

@ -0,0 +1,221 @@
.. _makefile:
================
Makefile Targets
================
.. _gnu-make: https://www.gnu.org/software/make/manual/make.html#Introduction
.. sidebar:: build environment
Before looking deeper at the targets, first read about :ref:`makefile setup`
and :ref:`make pyenv`.
With the aim of simplifying development cycles, a ``Makefile`` based
boilerplate was added with :pull:`1756`. If you are not familiar with
Makefiles, we recommend reading the gnu-make_ introduction.
The usage is simple, just type ``make {target-name}`` to *build* a target.
Calling the ``help`` target gives a first overview::
$ make help
test - run developer tests
docs - build documentation
docs-live - autobuild HTML documentation while editing
run - run developer instance
install - developer install (./local)
uninstall - uninstall (./local)
gh-pages - build docs & deploy on gh-pages branch
clean - drop builds and environments
...
.. contents:: Contents
:depth: 2
:local:
:backlinks: entry
.. _makefile setup:
Setup
=====
.. _git stash: https://git-scm.com/docs/git-stash
The main setup is done in the :origin:`Makefile`::
export GIT_URL=https://github.com/asciimoo/searx
export SEARX_URL=https://searx.me
export DOCS_URL=https://asciimoo.github.io/searx
.. sidebar:: fork & upstream
Commit changes in your (local) branch, fork or whatever, but do not push them
upstream / `git stash`_ is your friend.
:GIT_URL: Change this to point to your searx fork.
:SEARX_URL: Change this to point to your searx instance.
:DOCS_URL: If you host your own (branded) documentation, change this URL.
.. _make pyenv:
Python environment
==================
.. sidebar:: activate environment
``source ./local/py3/bin/activate``
With the Makefile we no longer need to build up the virtualenv manually (as
described in the :ref:`devquickstart` guide). Jump into your git working tree
and run ``make pyenv``:
.. code:: sh
$ cd ~/searx-clone
$ make pyenv
PYENV usage: source ./local/py3/bin/activate
...
With the target ``pyenv`` a development environment (aka virtualenv) is built
up in ``./local/py3/``. To make a *developer install* of searx (:origin:`setup.py`)
into this environment, use the make target ``install``:
.. code:: sh
$ make install
PYENV usage: source ./local/py3/bin/activate
PYENV using virtualenv from ./local/py3
PYENV install .
You never have to think about intermediate targets like ``pyenv`` or
``install``; the ``Makefile`` chains them as prerequisites. Just run your main
target.
.. sidebar:: drop environment
To get rid of the existing environment before re-build use :ref:`clean target
<make clean>` first.
If you think something has gone wrong with your ./local environment, or you change
the :origin:`setup.py` file (or the requirements listed in
:origin:`requirements-dev.txt` and :origin:`requirements.txt`), you have to call
:ref:`make clean`.
.. _make run:
``make run``
============
To get a developer instance up and running, simply call ``make run``. This
enables the *debug* option in :origin:`searx/settings.yml`, starts a
``./searx/webapp.py`` instance, disables the *debug* option again and opens the
URL in your favorite web browser (:man:`xdg-open`):
.. code:: sh
$ make run
PYENV usage: source ./local/py3/bin/activate
PYENV install .
./local/py3/bin/python ./searx/webapp.py
...
INFO:werkzeug: * Running on http://127.0.0.1:8888/ (Press CTRL+C to quit)
...
.. _make clean:
``make clean``
==============
Drop all intermediate files and all builds, but keep sources untouched. Includes
the target ``pyclean``, which drops the ./local environment. Before calling
``make clean``, stop all processes using the :ref:`make pyenv` environment.
.. code:: sh
$ make clean
CLEAN pyclean
CLEAN clean
.. _make docs:
``make docs docs-live docs-clean``
==================================
We describe the usage of the ``doc*`` targets in the :ref:`How to contribute /
Documentation <contrib docs>` section. If you want to edit the documentation,
read our :ref:`make docs-live` section. If you are working on your own brand,
adjust your :ref:`Makefile setup <makefile setup>`.
.. _make gh-pages:
``make gh-pages``
=================
To deploy on github.io, first adjust your :ref:`Makefile setup <makefile
setup>`. For anything further, read :ref:`deploy on github.io`.
.. _make test:
``make test``
=============
Runs a series of tests: ``test.pep8``, ``test.unit``, ``test.robot`` and does
additional :ref:`pylint checks <make pylint>`. You can run tests selectively,
e.g.:
.. code:: sh
$ make test.pep8 test.unit
. ./local/py3/bin/activate; ./manage.sh pep8_check
[!] Running pep8 check
. ./local/py3/bin/activate; ./manage.sh unit_tests
[!] Running unit tests
.. _make pylint:
``make pylint``
===============
.. _Pylint: https://www.pylint.org/
Before committing, it is recommended to do some (more) linting. Pylint_ is known
as one of the best source-code, bug and quality checkers for the Python
programming language. Pylint_ is not yet a quality gate within our searx project
(as :ref:`test.pep8 <make test>` is), but Pylint_ can help to improve code
quality anyway. The pylint profile we use in the searx project is found in the
project's root folder: :origin:`.pylintrc`.
Code quality is an ongoing process. Don't try to fix all messages from Pylint;
run Pylint and check whether your changed lines bring up new messages. If so,
fix them. This way, code quality gets incrementally better, and if the day
comes when the linting is balanced out, we might decide to add Pylint as a
quality gate.
``make pybuild``
================
.. _PyPi: https://pypi.org/
.. _twine: https://twine.readthedocs.io/en/latest/
Build Python packages in ``./dist/py``.
.. code:: sh
$ make pybuild
...
BUILD pybuild
running sdist
running egg_info
...
$ ls ./dist/py/
searx-0.15.0-py3-none-any.whl searx-0.15.0.tar.gz
To upload packages to PyPi_, there is also an ``upload-pypi`` target. It needs
twine_ to be installed. Since you are not the owner of :pypi:`searx`, you will
never need the latter.


@ -1,7 +1,13 @@
.. _dev plugin:
=======
Plugins
=======
.. sidebar:: Further reading ..
- :ref:`plugins generic`
Plugins can extend or replace functionality of various components of searx.
Example plugin


@ -4,15 +4,23 @@
Development Quickstart
======================
.. sidebar:: :ref:`makefile`
For additional developer purposes there are :ref:`makefile`.
This quickstart guide gets your environment set up with searx. Furthermore, it
gives a short introduction to the ``manage.sh`` script.
How to setup your development environment
=========================================
.. sidebar:: :ref:`make pyenv <make pyenv>`
Alternatively use the :ref:`make pyenv`.
First, clone the source code of searx to the desired folder. In this case the
source is cloned to ``~/myprojects/searx``. Then create and activate the
searx-ve virtualenv and install the required packages using manage.sh.
searx-ve virtualenv and install the required packages using ``manage.sh``.
.. code:: sh
@ -27,6 +35,10 @@ searx-ve virtualenv and install the required packages using manage.sh.
How to run tests
================
.. sidebar:: :ref:`make test.unit <make test>`
Alternatively use the ``test.pep8``, ``test.unit``, ``test.robot`` targets.
Tests can be run using the ``manage.sh`` script. The following tests and checks are
available:
@ -41,7 +53,8 @@ For example unit tests are run with the command below:
./manage.sh unit_tests
For further test options, please consult the help of the ``manage.sh`` script.
For further test options, please consult the help of the ``manage.sh`` script or
read :ref:`make test`.
How to compile styles and javascript
@ -97,6 +110,11 @@ After installing grunt, the files can be built using the following command:
Tips for debugging/development
==============================
.. sidebar:: :ref:`make run`
Makefile target ``run`` already enables debug option for your developer
session / see :ref:`make run`.
Turn on debug logging
Whether you are working on a new engine or trying to eliminate a bug, it is
always a good idea to turn on debug logging. When debug logging is enabled a
@ -104,6 +122,10 @@ Turn on debug logging
message. It can be turned on by changing ``debug: False`` to ``debug: True`` in
:origin:`settings.yml <searx/settings.yml>`.
.. sidebar:: :ref:`make test`
Alternatively use the :ref:`make test` targets.
Run ``./manage.sh tests`` before creating a PR.
Failing build on Travis is common because of PEP8 checks. So a new commit
must be created containing these format fixes. This phase can be skipped if

docs/dev/reST.rst

File diff suppressed because it is too large.


@ -1,3 +1,5 @@
.. _search API:
==========
Search API
==========
Furthermore, two endpoints ``/`` and ``/search`` are available for querying.
Parameters
==========
.. sidebar:: Further reading ..
- :ref:`engines-dev`
- :ref:`settings.yml`
- :ref:`engines generic`
``q`` : required
The search query. This string is passed to external search services. Thus,
searx supports syntax of each search service. For example, ``site:github.com
@ -26,48 +34,48 @@ Parameters
``categories`` : optional
Comma separated list, specifies the active search categories
``engines``: optional
``engines`` : optional
Comma separated list, specifies the active search engines.
``lang``: default ``all``
``lang`` : default ``all``
Code of the language.
``pageno``: default ``1``
``pageno`` : default ``1``
Search page number.
``time_range``: optional
``time_range`` : optional
[ ``day``, ``month``, ``year`` ]
Time range of search for engines which support it. See if an engine supports
time range search in the preferences page of an instance.
``format``: optional
``format`` : optional
[ ``json``, ``csv``, ``rss`` ]
Output format of results.
``results_on_new_tab``: default ``0``
``results_on_new_tab`` : default ``0``
[ ``0``, ``1`` ]
Open search results on new tab.
``image_proxy``: default ``False``
``image_proxy`` : default ``False``
[ ``True``, ``False`` ]
Proxy image results through searx.
``autocomplete``: default *empty*
``autocomplete`` : default *empty*
[ ``google``, ``dbpedia``, ``duckduckgo``, ``startpage``, ``wikipedia`` ]
Service which completes words as you type.
``safesearch``: default ``None``
``safesearch`` : default ``None``
[ ``0``, ``1``, ``None`` ]
Filter search results of engines which support safe search. See if an engine
supports safe search in the preferences page of an instance.
``theme``: default ``oscar``
``theme`` : default ``oscar``
[ ``oscar``, ``simple``, ``legacy``, ``pix-art``, ``courgette`` ]
Theme of instance.
@ -76,7 +84,7 @@ Parameters
instance administrator deleted, created or renamed themes on his/her instance.
See the available options in the preferences page of the instance.
``oscar-style``: default ``logicodev``
``oscar-style`` : default ``logicodev``
[ ``pointhi``, ``logicodev`` ]
Style of Oscar theme. It is only parsed if the theme of an instance is
@ -86,7 +94,7 @@ Parameters
instance administrator deleted, created or renamed styles on his/her
instance. See the available options in the preferences page of the instance.
``enabled_plugins``: optional
``enabled_plugins`` : optional
List of enabled plugins.
:default: ``HTTPS_rewrite``, ``Self_Informations``,
@ -104,9 +112,9 @@ Parameters
``Vim-like_hotkeys``, ``Self_Informations``, ``Tracker_URL_remover``,
``Search_on_category_select``
``enabled_engines``: optional : *all* :origin:`engines <searx/engines>`
``enabled_engines`` : optional : *all* :origin:`engines <searx/engines>`
List of enabled engines.
``disabled_engines``: optional : *all* :origin:`engines <searx/engines>`
``disabled_engines`` : optional : *all* :origin:`engines <searx/engines>`
List of disabled engines.
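
As an illustration (not part of the original API description), the following
Python sketch queries the ``/search`` endpoint with some of the parameters
listed above. The instance URL is hypothetical, and the JSON response is
assumed to contain a ``results`` list whose items provide ``title`` and ``url``
fields:

.. code:: python

   import requests

   # Hypothetical instance URL; replace it with your own searx instance.
   SEARX_URL = "https://searx.example.org"

   resp = requests.get(
       SEARX_URL + "/search",
       params={
           "q": "privacy respecting search",  # required: the search query
           "format": "json",                  # JSON output instead of HTML
           "pageno": 1,                       # search page number
           "lang": "en",                      # language code
       },
   )
   resp.raise_for_status()
   for result in resp.json().get("results", []):
       print(result.get("title"), result.get("url"))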

docs/dev/svg_image.svg

@ -0,0 +1,10 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- originate: https://commons.wikimedia.org/wiki/File:Variable_Resistor.svg -->
<svg xmlns="http://www.w3.org/2000/svg"
version="1.1" baseProfile="full"
width="70px" height="40px" viewBox="0 0 700 400">
<line x1="0" y1="200" x2="700" y2="200" stroke="black" stroke-width="20px"/>
<rect x="100" y="100" width="500" height="200" fill="white" stroke="black" stroke-width="20px"/>
<line x1="180" y1="370" x2="500" y2="50" stroke="black" stroke-width="15px"/>
<polygon points="585 0 525 25 585 50" transform="rotate(135 525 25)"/>
</svg>



@ -5,5 +5,6 @@ User documentation
.. toctree::
:maxdepth: 1
public_instances
search_syntax
own-instance


@ -0,0 +1,318 @@
.. _public instances:
..
links has been ported from markdown to reST by::
regexpr: \[([^\]]*)\]\(([^)]*)\)
substitution: `\1 <\2>`__
======================
Public Searx instances
======================
.. _mailing list: mailto:searx-instances@autistici.org
.. _subscription page: https://www.autistici.org/mailman/listinfo/searx-instances
Useful information
==================
* Up-to-date health report available on https://stats.searx.xyz [1]_, for onion
(tor) services: https://stats.searx.xyz/tor.html
* Searx instances `mailing list`_ & `subscription page`_.
* Some of the Searx instances have a CAcert SSL certificate. You can install the
missing root cert `from here <http://www.cacert.org/index.php?id=3>`__.
* To add your own Searx instance to this page, send us your PR. A GitHub account
is required to send a PR or open an issue.
.. [1] Note that most of the instances with an A+ grade in the CSP column on this
site are not fully functional - for example auto-completion may not work.
List of public Searx instances
==============================
Meta-searx instances
====================
These are websites that source from other searx instances. These are useful if
you can't decide which Searx instance to use:
.. flat-table:: Meta-searx instances
:header-rows: 1
:stub-columns: 0
:widths: 2 1 2 4 4
* - clearnet host
- onion host
- issuer
- source selection method
- extra privacy features
* - `Neocities <https://searx.neocities.org/>`__
- n/a
- Comodo (`Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.neocities.org>`__)
- Redirects users directly to a random selection of any known running
server after entering query. Requires
Javascript. `Changelog <https://searx.neocities.org/changelog.html>`__.
- Excludes servers with user tracking and analytics or are proxied through
Cloudflare.
* - `Searxes <https://searxes.danwin1210.me/>`__ @Danwin
- onion v3 `hidden service
<http://searxes.nmqnkngye4ct7bgss4bmv5ca3wpa55yugvxen5kz2bbq67lwy6ps54yd.onion/>`__
- Let's Encrypt (`Verification
<https://www.ssllabs.com/ssltest/analyze.html?d=searxes.danwin1210.me>`__)
- sources data from a randomly selected running server that satisfies the
admin's quality standards, which is then used for post-processing
- filters out privacy-hostile websites (like CloudFlare) and either marks
them as such or folds them below the high ranking results.
Alive and running
=================
**BEFORE EDITING**: Please add your Searx instance respecting the alphabetical order.
.. note::
Public instances listed here may yield less accurate results as they have
much higher traffic and consequently have a higher chance of being blocked by
search providers such as Google, Qwant, Bing, Startpage, etc. Hosting your
own instance or using an instance that isn't listed here may give you a more
consistent search experience.
* `ai.deafpray.wtf/searx <https://ai.deafpray.wtf/searx>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=ai.deafpray.wtf/searx>`__
* `bamboozle.it <https://bamboozle.it/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=bamboozle.it>`__
* `bee.jaekr.dev <https://bee.jaekr.dev>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=bee.jaekr.dev>`__
* `beezboo.com <https://beezboo.com/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=beezboo.com>`__
* `burtrum.org/searx <https://burtrum.org/searx>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=burtrum.org/searx>`__
* `darmarit.cloud/searx <https://darmarit.cloud/searx>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=darmarit.cloud/searx>`__
* `dc.ax <https://dc.ax>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=dc.ax>`__
* `dynabyte.ca <https://dynabyte.ca>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=dynabyte.ca>`__
* `goso.ga <https://goso.ga/search>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=goso.ga>`__
* `gruble.de <https://www.gruble.de/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=www.gruble.de>`__
* `haku.ahmia.fi <https://haku.ahmia.fi/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=haku.ahmia.fi&latest>`__
* `haku.lelux.fi <https://haku.lelux.fi/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=haku.lelux.fi>`__
* `huyo.me <https://huyo.me/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=huyo.me>`__
* `jsearch.pw <https://jsearch.pw>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=jsearch.pw>`__
* `le-dahut.com/searx <https://le-dahut.com/searx>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=le-dahut.com/searx>`__
* `mijisou.com <https://mijisou.com/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=mijisou.com>`__
* `null.media <https://null.media>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=null.media>`__
* `openworlds.info <https://openworlds.info/>`__ - Issuer: Let's Encrypt
* `perfectpixel.de/searx/ <https://www.perfectpixel.de/searx/>`__ - Issuer: LetsEncrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=www.perfectpixel.de>`__
* `ransack.i2p <http://ransack.i2p/>`__ - I2P eepsite, only accessible with `I2P <https://geti2p.net/>`__ (`base32 address <http://mqamk4cfykdvhw5kjez2gnvse56gmnqxn7vkvvbuor4k4j2lbbnq.b32.i2p>`__)
* `rapu.nz <https://rapu.nz/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=rapu.nz>`__
* `roflcopter.fr <https://wtf.roflcopter.fr/searx>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=wtf.roflcopter.fr>`__
* `roteserver.de/searx <https://roteserver.de/searx>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=roteserver.de>`__
* `s.cmd.gg <https://s.cmd.gg>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=s.cmd.gg>`__
* `search.activemail.de <https://search.activemail.de/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.activemail.de&latest>`__
* `search.anonymize.com <https://search.anonymize.com/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.anonymize.com>`__
* `search.azkware.net <https://search.azkware.net/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.azkware.net>`__
* `search.biboumail.fr <https://search.biboumail.fr/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.biboumail.fr>`__
* `search.blankenberg.eu <https://search.blankenberg.eu>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.blankenberg.eu>`__
* `search.d4networks.com <https://search.d4networks.com/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.d4networks.com>`__
* `search.datensturm.net <https://search.datensturm.net>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.datensturm.net>`__
* `search.disroot.org <https://search.disroot.org/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.disroot.org>`__
* `search.ethibox.fr <https://search.ethibox.fr>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.ethibox.fr>`__
* `search.fossdaily.xyz <https://search.fossdaily.xyz>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.fossdaily.xyz>`__
* `search.galaxy.cat <https://search.galaxy.cat>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.galaxy.cat>`__
* `search.gibberfish.org <https://search.gibberfish.org/>`__ (as `Hidden Service <http://o2jdk5mdsijm2b7l.onion/>`__ or `Proxied through Tor <https://search.gibberfish.org/tor/>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.gibberfish.org>`__
* `search.koehn.com <https://search.koehn.com>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.koehn.com>`__
* `search.lgbtq.cool <https://search.lgbtq.cool/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.lgbtq.cool>`__
* `search.mdosch.de <https://search.mdosch.de/>`__ (as `Hidden Service <http://search.4bkxscubgtxwvhpe.onion/>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.mdosch.de>`__
* `search.modalogi.com <https://search.modalogi.com/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.modalogi.com&latest>`__
* `search.moravit.com <https://search.moravit.com>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.moravit.com>`__
* `search.nebulacentre.net <https://search.nebulacentre.net>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.nebulacentre.net>`__
* `search.paulla.asso.fr <https://search.paulla.asso.fr/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.paulla.asso.fr>`__
* `search.pifferi.info <https://search.pifferi.info/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.pifferi.info&latest>`__
* `search.poal.co <https://search.poal.co/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.poal.co>`__
* `search.privacytools.io <https://search.privacytools.io/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.privacytools.io>`__ - Uses Matomo for user tracking and analytics
* `search.seds.nl <https://search.seds.nl/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.seds.nl&latest>`__
* `search.snopyta.org <https://search.snopyta.org/>`__ (as `Hidden Service <http://juy4e6eicawzdrz7.onion/>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.snopyta.org>`__
* `search.spaeth.me <https://search.spaeth.me/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.spaeth.me&latest>`__
* `search.st8.at <https://search.st8.at/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.st8.at>`__
* `search.stinpriza.org <https://search.stinpriza.org>`__ (as `Hidden Service <http://z5vawdol25vrmorm4yydmohsd4u6rdoj2sylvoi3e3nqvxkvpqul7bqd.onion/>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.stinpriza.org&hideResults=on>`__
* `search.sudo-i.net <https://search.sudo-i.net/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.sudo-i.net>`__
* `search.tolstoevsky.ml <https://search.tolstoevsky.ml>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.tolstoevsky.ml>`__
* `searchsin.com/searx <https://searchsin.com/searx>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searchsin.com/searx>`__
* `searx.anongoth.pl <https://searx.anongoth.pl>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.anongoth.pl&latest>`__
* `searx.be <https://searx.be>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.be>`__
* `searx.ca <https://searx.ca/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.ca>`__
* `searx.canox.net <https://searx.canox.net/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.canox.net>`__
* `searx.cybt.de <https://searx.cybt.de/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.cybt.de>`__
* `searx.de <https://www.searx.de/>`__ - Issuer: COMODO `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.de>`__
* `searx.decatec.de <https://searx.decatec.de>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.decatec.de>`__
* `searx.devol.it <https://searx.devol.it/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=sears.devol.it>`__
* `searx.dnswarden.com <https://searx.dnswarden.com>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.dnswarden.com>`__
* `searx.drakonix.net <https://searx.drakonix.net/>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.drakonix.net>`__
* `searx.dresden.network <https://searx.dresden.network/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.dresden.network>`__
* `searx.elukerio.org <https://searx.elukerio.org/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.elukerio.org/>`__
* `searx.everdot.org <https://searx.everdot.org/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.everdot.org/>`__ - Crawls using YaCy
* `searx.foo.li <https://searx.foo.li>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.foo.li&hideResults=on>`__
* `searx.fossencdi.org <https://searx.fossencdi.org>`__ (as `Hidden Service <http://searx.cwuzdtzlubq5uual.onion/>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.fossencdi.org>`__
* `searx.fr32k.de <https://searx.fr32k.de/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.fr32k.de>`__
* `searx.good.one.pl <https://searx.good.one.pl>`__ (as `Hidden Service <http://searxl7u2y6gvonm.onion/>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.good.one.pl>`__
* `searx.gotrust.de <https://searx.gotrust.de/>`__ (as `Hidden Service <http://nxhhwbbxc4khvvlw.onion/>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.gotrust.de>`__
* `searx.hardwired.link <https://searx.hardwired.link/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.hardwired.link>`__
* `searx.hlfh.space <https://searx.hlfh.space>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.hlfh.space>`__
* `searx.info <https://searx.info>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.info>`__
* `searx.itunix.eu <https://searx.itunix.eu/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.itunix.eu>`__
* `searx.john-at-me.net <https://searx.john-at-me.net/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.john-at-me.net>`__
* `searx.kvch.me <https://searx.kvch.me>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.kvch.me>`__
* `searx.laquadrature.net <https://searx.laquadrature.net>`__ (as `Hidden Service <http://searchb5a7tmimez.onion/>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.laquadrature.net>`__
* `searx.lelux.fi <https://searx.lelux.fi/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=haku.lelux.fi>`__
* `searx.lhorn.de <https://searx.lhorn.de/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.lhorn.de&latest>`__
* `searx.li <https://searx.li/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.li>`__
* `searx.libmail.eu <https://searx.libmail.eu/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.libmail.eu/>`__
* `searx.linux.pizza <https://searx.linux.pizza>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.linux.pizza>`__
* `searx.lynnesbian.space <https://searx.lynnesbian.space/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.lynnesbian.space>`__
* `searx.mastodontech.de <https://searx.mastodontech.de/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.mastodontech.de>`__
* `searx.me <https://searx.me>`__ (as `Hidden Service <http://ulrn6sryqaifefld.onion/>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.me>`__
* `searx.mxchange.org <https://searx.mxchange.org/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.mxchange.org>`__
* `searx.nakhan.net <https://searx.nakhan.net>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.nakhan.net>`__
* `searx.nixnet.xyz <https://searx.nixnet.xyz>`__ (as `Hidden Service <http://searx.l4qlywnpwqsluw65ts7md3khrivpirse744un3x7mlskqauz5pyuzgqd.onion/>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.nixnet.xyz>`__
* `searx.nnto.net <https://searx.nnto.net/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.nnto.net>`__
* `searx.openhoofd.nl <https://searx.openhoofd.nl/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=openhoofd.nl>`__
* `searx.openpandora.org <https://searx.openpandora.org/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.openpandora.org&latest>`__
* `searx.operationtulip.com <https://searx.operationtulip.com/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.operationtulip.com>`__
* `searx.orcadian.net <https://searx.orcadian.net/>`__ - Issuer: Comodo CA Limited `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.orcadian.net>`__
* `searx.ouahpit.info <https://searx.ouahpiti.info/>`__ - Issuer: Let's Encrypt
* `searx.pofilo.fr <https://searx.pofilo.fr/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.pofilo.fr>`__
* `searx.prvcy.eu <https://searx.prvcy.eu/>`__ (as `Hidden Service <http://hmfztxt3pfhevucl.onion/>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.prvcy.eu>`__
* `searx.pwoss.org <https://searx.pwoss.org>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.pwoss.org>`__
* `searx.ro <https://searx.ro/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.ro>`__
* `searx.ru <https://searx.ru/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.ru>`__
* `searx.solusar.de <https://searx.solusar.de/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.solusar.de>`__
* `searx.targaryen.house <https://searx.targaryen.house/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.targaryen.house>`__
* `searx.tuxcloud.net <https://searx.tuxcloud.net>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.tuxcloud.net>`__
* `searx.tyil.nl <https://searx.tyil.nl>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.tyil.nl>`__
* `searx.wegeeks.win <https://searx.wegeeks.win>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.wegeeks.win>`__
* `searx.win <https://searx.win/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.win&latest>`__
* `searx.xyz <https://searx.xyz/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.xyz&latest>`__
* `searx.zareldyn.net <https://searx.zareldyn.net/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.zareldyn.net>`__
* `searx.zdechov.net <https://searx.zdechov.net>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.zdechov.net>`__
* `searxs.eu <https://www.searxs.eu>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=www.searxs.eu&hideResults=on>`__
* `seeks.hsbp.org <https://seeks.hsbp.org/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=seeks.hsbp.org>`__ - `PGP signed fingerprints of cert <https://seeks.hsbp.org/cert>`__
* `skyn3t.in/srx <https://skyn3t.in/srx/>`__ - Issuer: Let's Encrypt | onion `hidden service <http://skyn3tb3bas655mw.onion/srx/>`__
* `spot.ecloud.global <https://spot.ecloud.global/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=spot.ecloud.global>`__
* `srx.sx <https://srx.sx>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=srx.sx>`__
* `stemy.me/searx <https://stemy.me/searx>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=stemy.me>`__
* `suche.dasnetzundich.de <https://suche.dasnetzundich.de>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=suche.dasnetzundich.de>`__
* `suche.elaon.de <https://suche.elaon.de>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=suche.elaon.de>`__
* `suche.xyzco456vwisukfg.onion <http://suche.xyzco456vwisukfg.onion/>`__
* `suche.uferwerk.org <https://suche.uferwerk.org>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=suche.uferwerk.org>`__
* `timdor.noip.me/searx <https://timdor.noip.me/searx>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=timdor.noip.me/searx>`__
* `trovu.komun.org <https://trovu.komun.org>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=trovu.komun.org>`__
* `unmonito.red <https://unmonito.red/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=unmonito.red>`__
* `www.finden.tk <https://www.finden.tk/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=www.finden.tk>`__
* `zoek.anchel.nl <https://zoek.anchel.nl/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=zoek.anchel.nl>`__
Running in exclusive private walled-gardens
===========================================
These instances run in walled gardens that exclude some segments of the general
public (e.g. Tor users and users sharing IPs with many other users). Caution:
privacy is also compromised on these sites, because cleartext traffic is exposed
to a third party other than the website operator.
* `intelme.com <https://intelme.com>`__ - Issuer: Cloudflare `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=intelme.com>`__
* `search404.io <https://www.search404.io/>`__ - Issuer: Cloudflare `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search404.io>`__
* `searx.com.au <https://searx.com.au/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.com.au>`__
* `searx.lavatech.top <https://searx.lavatech.top/>`__ - Issuer: Cloudflare `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.lavatech.top>`__
* `searchx.mobi <https://searchx.mobi/>`__ - Issuer: Cloudflare `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searchx.mobi>`__
* `searx.org <https://searx.org/>`__ - Issuer: Cloudflare `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.org>`__
* `searx.run <https://searx.run/>`__ - Issuer: Cloudflare `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.run>`__
* `searx.world <https://searx.world>`__ - Issuer: Cloudflare `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.world>`__ - Adds Amazon affiliate links
Running with an incorrect SSL certificate
=========================================
* `listi.me <https://listi.me/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=listi.me&latest>`__
* `s.matejc.com <https://s.matejc.com/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=s.matejc.com>`__
* `search.jollausers.de <https://search.jollausers.de>`__ - Incorrectly configured `SSL certificate <https://www.ssllabs.com/ssltest/analyze.html?d=search.jollausers.de>`__
* `search.paviro.de <https://search.paviro.de>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.paviro.de>`__
* `searx.abenthung.it <https://searx.abenthung.it/>`__ - Issuer: Comodo CA Limited `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.abenthung.it>`__
* `searx.coding4schoki.org <https://searx.coding4schoki.org/>`__ - Incorrectly configured `SSL Certificate <https://www.ssllabs.com/ssltest/analyze.html?d=searx.coding4schoki.org>`__
* `searx.haxors.club <https://searx.haxors.club/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.haxors.club>`__
* `searx.nulltime.net <https://searx.nulltime.net/>`__ (as `Hidden Service <http://searx7gwtu5rh6wr.onion>`__) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.nulltime.net>`__
* `searx.ch <https://searx.ch/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.ch>`__ (cert clock problems)
Offline
=======
* `a.searx.space <https://a.searx.space>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=a.searx.space>`__ (unstable, under construction).
* `anyonething.de <https://anyonething.de>`__ - (was found to have become a pastebin on or before 2019-03-01) Issuer: Comodo CA Limited (Warning: uses Cloudflare) `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=anyonething.de>`__
* `7jwxg5rakyfvikpi.onion <http://7jwxg5rakyfvikpi.onion/>`__ - available only as Tor Hidden Service (down on 2019-06-26)
* `hacktivis.me/searx <https://hacktivis.me/searx>`__ - (down) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=hacktivis.me/searx>`__
* `icebal.com <https://icebal.com>`__ - (down) Issuer: Let's Encrypt
* `netrangler.host <https://netrangler.host>`__ - (down) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=netrangler.host>`__
* `opengo.nl <https://www.opengo.nl>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=www.opengo.nl>`__
* `p9e.de <https://p9e.de/>`__ - (down - timeout) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=p9e.de>`__
* `rubri.co <https://rubri.co>`__ - (down) Issuer: Let's Encrypt
* `s.bacafe.xyz <https://s.bacafe.xyz/>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=s.bacafe.xyz&latest>`__
* `search.alecpap.com <https://search.alecpap.com/>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.alecpap.com>`__
* `search.blackit.de <https://search.blackit.de/>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.blackit.de>`__
* `search.deblan.org <https://search.deblan.org/>`__ (down) - Issuer: COMODO via GANDI `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.deblan.org>`__
* `search.homecomputing.fr <https://search.homecomputing.fr/>`__ - (down) Issuer: CAcert `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.homecomputing.fr>`__
* `search.jpope.org <https://search.jpope.org>`__ - (down - timeout) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.jpope.org>`__
* `search.kakise.xyz <https://search.kakise.xyz/>`__ - (down)
* `search.kosebamse.com <https://search.kosebamse.com>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.kosebamse.com>`__
* `search.kujiu.org <https://search.kujiu.org>`__ - (down) Issuer: Let's Encrypt
* `search.mailaender.coffee <https://search.mailaender.coffee/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.mailaender.coffee>`__
* `search.matrix.ac <https://search.matrix.ac>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=matrix.ac>`__
* `search.mypsc.ca <https://search.mypsc.ca/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.mypsc.ca>`__
* `search.namedkitten.pw <https://search.namedkitten.pw>`__ - (SSL error) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.namedkitten.pw>`__
* `search.opentunisia.org <https://search.opentunisia.org>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.opentunisia.org>`__
* `search.r3d007.com <https://search.r3d007.com/>`__ - (down) Issuer: Let's Encrypt
* `search.static.lu <https://search.static.lu/>`__ - (down) Issuer: StartCom `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.static.lu>`__
* `search.teej.xyz <https://search.teej.xyz>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.teej.xyz>`__
* `search.wxzm.sx <https://search.wxzm.sx>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=search.wxzm.sx>`__
* `searx.4ray.co <https://searx.4ray.co/>`__ - (no longer an instance, redirects to main page) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.4ray.co>`__
* `searx.32bitflo.at <https://searx.32bitflo.at/>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.32bitflo.at>`__
* `searx.ahh.si <https://searx.ahh.si/>`__ - (down) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.ahh.si>`__
* `searx.angristan.xyz <https://searx.angristan.xyz/>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.angristan.xyz>`__
* `searx.antirep.net <https://searx.antirep.net/>`__ - (returns a 502 HTTP error) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.antirep.net>`__
* `searx.aquilenet.fr <https://searx.aquilenet.fr/>`__ - (down - 429 HTTP error) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.aquilenet.fr>`__
* `searx.at <https://searx.at/>`__ - (returns "request exception" on every search) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.at>`__
* `searx.cc <https://searx.cc/>`__ - (down on 2019-06-26) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.cc>`__
* `searx.dk <https://searx.dk/>`__ - (down - 429 HTTP error) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.dk>`__
* `searx.ehrmanns.ch <https://searx.ehrmanns.ch>`__ - (down) Issuer: Let's Encrypt
* `searx.glibre.net <https://searx.glibre.net>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.glibre.net>`__
* `searx.infini.fr <https://searx.infini.fr>`__ - (returns a page stating that the website is not installed) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.infini.fr>`__
* `searx.jeanphilippemorvan.info <https://searx.jeanphilippemorvan.info/>`__ - (down) Issuer: StartCom `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.jeanphilippemorvan.info>`__
* `searx.lhorn.de <https://searx.lhorn.de/>`__ - (redirects to the searx GitHub repository page) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.lhorn.de&latest>`__ (only reachable from European countries)
* `searx.lvweb.host <https://searx.lvweb.host>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.lvweb.host>`__
* `searx.mrtino.eu <https://searx.mrtino.eu>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.mrtino.eu>`__
* `searx.netzspielplatz.de <https://searx.netzspielplatz.de/>`__ - (error page about GDPR even when browsing it from USA and Asia) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.netzspielplatz.de>`__
* `searx.new-admin.net <https://searx.new-admin.net>`__ - (down) Issuer: Let's Encrypt
* `searx.nogafa.org <https://searx.nogafa.org/>`__ - (broken CSS) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.nogafa.org>`__
* `searx.potato.hu <https://searx.potato.hu>`__ - (not a searx instance) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.potato.hu>`__
* `searx.rubbeldiekatz.info <https://searx.rubbeldiekatz.info/>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.rubbeldiekatz.info>`__
* `searx.s42.space <https://searx.s42.space>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.s42.space>`__
* `searx.salcay.hu <https://searx.salcay.hu/>`__ - (down - blank page) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.salcay.hu>`__
* `searx.selea.se <https://searx.selea.se>`__ - (Leads to default Apache page) Issuer: RapidSSL (HSTS preloaded, DNSSEC) `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.selea.se>`__ | `HSTS Preload <https://hstspreload.org/?domain=searx.selea.se>`__
* `searx.steinscraft.net <https://searx.steinscraft.net/>`__ - (down) Issuer: Cloudflare
* `searx.techregion.de <https://searx.techregion.de/>`__ - (domain expired) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.techregion.de>`__
* `searx.tognella.com <https://searx.tognella.com/>`__ - (down) Issuer: Cloudflare
* `searx.xi.ht <https://searx.xi.ht/>`__ - (returns a 502 HTTP error) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searx.xi.ht>`__
* `searxist.com <https://searxist.com/>`__ - (down) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=searxist.com>`__
* `so.sb <https://so.sb/>`__ - (down) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=so.sb>`__
* `srx.stdout.net <https://srx.stdout.net/>`__ - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=srx.stdout.net>`__
* `w6f7cgdm54cyvohcuhraaafhajctyj3ihenrovuxogoagrr5g43qmoid.onion <http://w6f7cgdm54cyvohcuhraaafhajctyj3ihenrovuxogoagrr5g43qmoid.onion/>`__ - Hidden Service
* `win8linux.nohost.me <https://win8linux.nohost.me/searx/>`__ - (down) Issuer: Let's Encrypt
* `wiznet.tech <https://wiznet.tech>`__ - (down) - Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=wiznet.tech>`__
* `www.mercurius.space <https://www.mercurius.space/>`__ - (down) Issuer: Let's Encrypt
* `www.ready.pm <https://www.ready.pm>`__ - Issuer: WoSign `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=www.ready.pm>`__
* `z.awsmppl.com <https://z.awsmppl.com>`__ - (down) Issuer: Let's Encrypt `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=z.awsmppl.com>`__
* `zlsdzh.tk <https://zlsdzh.tk>`__ - (down - 404 HTTP error) Issuer: TrustAsia Technologies, Inc. `Verification <https://www.ssllabs.com/ssltest/analyze.html?d=zlsdzh.tk>`__

View file

@ -11,3 +11,6 @@ transifex-client==0.12.2
unittest2==1.1.0
zope.testrunner==4.5.1
selenium==3.141.0
linuxdoc @ git+http://github.com/return42/linuxdoc.git
sphinx-jinja
sphinx-tabs

View file

@ -89,8 +89,7 @@ def response(resp):
'content': content})
try:
result_len_container = "".join(eval_xpath(dom, '//span[@class="sb_count"]/text()'))
result_len_container = utils.to_string(result_len_container)
result_len_container = "".join(eval_xpath(dom, '//span[@class="sb_count"]//text()'))
if "-" in result_len_container:
# Remove the part "from-to" for paginated request ...
result_len_container = result_len_container[result_len_container.find("-") * 2 + 2:]
@ -102,7 +101,7 @@ def response(resp):
logger.debug('result error :\n%s', e)
pass
if _get_offset_from_pageno(resp.search_params.get("pageno", 0)) > result_len:
if result_len and _get_offset_from_pageno(resp.search_params.get("pageno", 0)) > result_len:
return []
results.append({'number_of_results': result_len})
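The second hunk also guards the pagination check: with the old condition, a result count that could not be parsed (result_len left at 0) made every requested page look out of range, so valid results were dropped. A small self-contained sketch of the new condition (hypothetical helper name, not the engine's code):

def keep_page(result_len, offset):
    # A parsed count of 0 means "count unknown", not "no results", so only
    # drop the page when a real count exists and the offset exceeds it.
    return not (result_len and offset > result_len)

# usage sketch (hypothetical values)
assert keep_page(0, 11) is True      # count could not be parsed -> keep the results
assert keep_page(35, 51) is False    # page requested beyond the reported count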

View file

@ -109,14 +109,22 @@ def response(resp):
else:
url = build_flickr_url(photo['ownerNsid'], photo['id'])
results.append({'url': url,
'title': title,
'img_src': img_src,
'thumbnail_src': thumbnail_src,
'content': content,
'author': author,
'source': source,
'img_format': img_format,
'template': 'images.html'})
result = {
'url': url,
'img_src': img_src,
'thumbnail_src': thumbnail_src,
'source': source,
'img_format': img_format,
'template': 'images.html'
}
try:
result['author'] = author
result['title'] = title
result['content'] = content
except:
result['author'] = ''
result['title'] = ''
result['content'] = ''
results.append(result)
return results
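The rewrite above builds the mandatory part of the result first and fills title, author and content inside a try/except, so a missing field blanks those three values instead of dropping the whole image result. A self-contained sketch of the same fallback pattern, with hypothetical photo keys rather than the engine's actual ones:

def build_result(url, img_src, photo):
    # mandatory fields first; optional ones guarded with an empty-string
    # fallback so the images.html template never sees a missing key
    result = {'url': url, 'img_src': img_src, 'template': 'images.html'}
    try:
        result['title'] = photo['title']           # hypothetical key
        result['author'] = photo['owner']          # hypothetical key
        result['content'] = photo['description']   # hypothetical key
    except KeyError:
        result['title'] = result['author'] = result['content'] = ''
    return result

# usage sketch: an empty mapping falls back to blank optional fields
print(build_result('https://flickr.example/1', 'https://img.example/1.jpg', {}))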

View file

@ -32,7 +32,7 @@ base_url = 'https://www.ina.fr'
search_url = base_url + '/layout/set/ajax/recherche/result?autopromote=&hf={ps}&b={start}&type=Video&r=&{query}'
# specific xpath variables
results_xpath = '//div[contains(@class,"search-results--list")]/div[@class="media"]'
results_xpath = '//div[contains(@class,"search-results--list")]//div[@class="media-body"]'
url_xpath = './/a/@href'
title_xpath = './/h3[@class="h3--title media-heading"]'
thumbnail_xpath = './/img/@src'
@ -65,8 +65,11 @@ def response(resp):
videoid = result.xpath(url_xpath)[0]
url = base_url + videoid
title = p.unescape(extract_text(result.xpath(title_xpath)))
thumbnail = extract_text(result.xpath(thumbnail_xpath)[0])
if thumbnail[0] == '/':
try:
thumbnail = extract_text(result.xpath(thumbnail_xpath)[0])
except:
thumbnail = ''
if thumbnail and thumbnail[0] == '/':
thumbnail = base_url + thumbnail
d = extract_text(result.xpath(publishedDate_xpath)[0])
d = d.split('/')

View file

@ -45,6 +45,8 @@ def request(query, params):
def response(resp):
results = []
response_data = loads(resp.text)
if not response_data:
return results
for result in response_data['results']:
url = _get_url(result)

View file

@ -29,7 +29,7 @@ def request(query, params):
params['url'] = search_url
params['method'] = 'POST'
params['headers']['Content-type'] = "application/json"
params['data'] = dumps({"query": query,
params['data'] = dumps({"query": query.decode('utf-8'),
"searchField": "ALL",
"sortDirection": "ASC",
"sortOrder": "RELEVANCY",

View file

@ -12,10 +12,14 @@
from json import loads
from searx.url_utils import urlencode
import requests
import base64
# engine dependent config
categories = ['music']
paging = True
api_client_id = None
api_client_secret = None
# search-url
url = 'https://api.spotify.com/'
@ -31,6 +35,16 @@ def request(query, params):
params['url'] = search_url.format(query=urlencode({'q': query}), offset=offset)
r = requests.post(
'https://accounts.spotify.com/api/token',
data={'grant_type': 'client_credentials'},
headers={'Authorization': 'Basic ' + base64.b64encode(
"{}:{}".format(api_client_id, api_client_secret).encode('utf-8')
).decode('utf-8')}
)
j = loads(r.text)
params['headers'] = {'Authorization': 'Bearer {}'.format(j.get('access_token'))}
return params
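The new request() code obtains a bearer token through Spotify's Client Credentials flow before each search. A standalone sketch of that token request, assuming api_client_id and api_client_secret hold valid credentials from the Spotify developer dashboard:

import base64
from json import loads

import requests

def get_spotify_token(api_client_id, api_client_secret):
    # Client Credentials flow: POST the grant type with the id:secret pair
    # Base64-encoded in an HTTP Basic Authorization header.
    auth = base64.b64encode(
        "{}:{}".format(api_client_id, api_client_secret).encode('utf-8')
    ).decode('utf-8')
    r = requests.post(
        'https://accounts.spotify.com/api/token',
        data={'grant_type': 'client_credentials'},
        headers={'Authorization': 'Basic ' + auth},
    )
    return loads(r.text).get('access_token')

# usage sketch: the token is then sent as "Authorization: Bearer <token>"
# headers = {'Authorization': 'Bearer {}'.format(get_spotify_token(cid, secret))}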

View file

@ -79,9 +79,10 @@ engines:
categories : science
timeout : 4.0
- name : base
engine : base
shortcut : bs
# tmp suspended: dh key too small
# - name : base
# engine : base
# shortcut : bs
- name : wikipedia
engine : wikipedia
@ -552,10 +553,11 @@ engines:
timeout : 10.0
disabled : True
- name : scanr structures
shortcut: scs
engine : scanr_structures
disabled : True
# tmp suspended: bad certificate
# - name : scanr structures
# shortcut: scs
# engine : scanr_structures
# disabled : True
- name : soundcloud
engine : soundcloud
@ -598,9 +600,12 @@ engines:
shortcut : se
categories : science
- name : spotify
engine : spotify
shortcut : stf
# Spotify needs API credentials
# - name : spotify
# engine : spotify
# shortcut : stf
# api_client_id : *******
# api_client_secret : *******
- name : startpage
engine : startpage

View file

@ -15,7 +15,68 @@
{% include 'oscar/search.html' %}
<div class="row">
<div class="col-sm-8" id="main_results">
<div class="col-sm-4 col-sm-push-8" id="sidebar_results">
{% if number_of_results != '0' -%}
<p><small>{{ _('Number of results') }}: {{ number_of_results }}</small></p>
{%- endif %}
{% if unresponsive_engines and results|length >= 1 -%}
<div class="alert alert-danger fade in" role="alert">
<p>{{ _('Engines cannot retrieve results') }}:</p>
{%- for engine_name, error_type in unresponsive_engines -%}
{{- engine_name }} ({{ error_type }}){% if not loop.last %}, {% endif %}{{- "" -}}
{%- endfor -%}
</div>
{%- endif %}
{% if infoboxes -%}
{% for infobox in infoboxes %}
{% include 'oscar/infobox.html' %}{{- "\n\n" -}}
{% endfor %}
{%- endif %}
{% if suggestions %}
<div class="panel panel-default">
<div class="panel-heading">
<h4 class="panel-title">{{ _('Suggestions') }}</h4>
</div>
<div class="panel-body">
{% for suggestion in suggestions %}
<form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" role="navigation" class="form-inline pull-{% if rtl %}right{% else %}left{% endif %} suggestion_item">
<input type="hidden" name="q" value="{{ suggestion.url }}">
<button type="submit" class="btn btn-default btn-xs">{{ suggestion.title }}</button>
</form>
{% endfor %}
</div>
</div>
{%- endif %}
<div class="panel panel-default">
<div class="panel-heading">{{- "" -}}
<h4 class="panel-title">{{ _('Links') }}</h4>{{- "" -}}
</div>
<div class="panel-body">
<form role="form">{{- "" -}}
<div class="form-group">{{- "" -}}
<label for="search_url">{{ _('Search URL') }}</label>{{- "" -}}
<input id="search_url" type="url" class="form-control select-all-on-click cursor-text" name="search_url" value="{{ search_url() }}" readonly>{{- "" -}}
</div>{{- "" -}}
</form>
<label>{{ _('Download results') }}</label>
<div class="clearfix"></div>
{% for output_type in ('csv', 'json', 'rss') %}
<form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" class="form-inline pull-{% if rtl %}right{% else %}left{% endif %} result_download">
{{- search_form_attrs(pageno) -}}
<input type="hidden" name="format" value="{{ output_type }}">{{- "" -}}
<button type="submit" class="btn btn-default">{{ output_type }}</button>{{- "" -}}
</form>
{% endfor %}
<div class="clearfix"></div>
</div>
</div>
</div><!-- /#sidebar_results -->
<div class="col-sm-8 col-sm-pull-4" id="main_results">
<h1 class="sr-only">{{ _('Search results') }}</h1>
{% if corrections -%}
@ -91,66 +152,5 @@
{% endif %}
{% endif %}
</div><!-- /#main_results -->
<div class="col-sm-4" id="sidebar_results">
{% if number_of_results != '0' -%}
<p><small>{{ _('Number of results') }}: {{ number_of_results }}</small></p>
{%- endif %}
{% if unresponsive_engines and results|length >= 1 -%}
<div class="alert alert-danger fade in" role="alert">
<p>{{ _('Engines cannot retrieve results') }}:</p>
{%- for engine_name, error_type in unresponsive_engines -%}
{{- engine_name }} ({{ error_type }}){% if not loop.last %}, {% endif %}{{- "" -}}
{%- endfor -%}
</div>
{%- endif %}
{% if infoboxes -%}
{% for infobox in infoboxes %}
{% include 'oscar/infobox.html' %}{{- "\n\n" -}}
{% endfor %}
{%- endif %}
{% if suggestions %}
<div class="panel panel-default">
<div class="panel-heading">
<h4 class="panel-title">{{ _('Suggestions') }}</h4>
</div>
<div class="panel-body">
{% for suggestion in suggestions %}
<form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" role="navigation" class="form-inline pull-{% if rtl %}right{% else %}left{% endif %} suggestion_item">
<input type="hidden" name="q" value="{{ suggestion.url }}">
<button type="submit" class="btn btn-default btn-xs">{{ suggestion.title }}</button>
</form>
{% endfor %}
</div>
</div>
{%- endif %}
<div class="panel panel-default">
<div class="panel-heading">{{- "" -}}
<h4 class="panel-title">{{ _('Links') }}</h4>{{- "" -}}
</div>
<div class="panel-body">
<form role="form">{{- "" -}}
<div class="form-group">{{- "" -}}
<label for="search_url">{{ _('Search URL') }}</label>{{- "" -}}
<input id="search_url" type="url" class="form-control select-all-on-click cursor-text" name="search_url" value="{{ search_url() }}" readonly>{{- "" -}}
</div>{{- "" -}}
</form>
<label>{{ _('Download results') }}</label>
<div class="clearfix"></div>
{% for output_type in ('csv', 'json', 'rss') %}
<form method="{{ method or 'POST' }}" action="{{ url_for('index') }}" class="form-inline pull-{% if rtl %}right{% else %}left{% endif %} result_download">
{{- search_form_attrs(pageno) -}}
<input type="hidden" name="format" value="{{ output_type }}">{{- "" -}}
<button type="submit" class="btn btn-default">{{ output_type }}</button>{{- "" -}}
</form>
{% endfor %}
<div class="clearfix"></div>
</div>
</div>
</div><!-- /#sidebar_results -->
</div>
{% endblock %}

View file

@ -198,6 +198,7 @@ $(GH_PAGES)::
$(MAKE) docs
[ -d "gh-pages/.git" ] || git clone $(GIT_URL) gh-pages
-cd $(GH_PAGES); git checkout gh-pages >/dev/null
-cd $(GH_PAGES); git pull
-cd $(GH_PAGES); ls -A | grep -v '.git$$' | xargs rm -rf
cp -r $(DOCS_DIST)/* $(GH_PAGES)/
touch $(GH_PAGES)/.nojekyll