2021-01-13 10:31:25 +00:00
|
|
|
# SPDX-License-Identifier: AGPL-3.0-or-later
|
2021-05-23 08:56:29 +00:00
|
|
|
"""The XPath engine is a *generic* engine with which it is possible to configure
|
|
|
|
engines in the settings.
|
|
|
|
|
2023-06-30 16:07:02 +00:00
|
|
|
.. _XPath selector: https://quickref.me/xpath.html#xpath-selectors
|
|
|
|
|
|
|
|
Configuration
|
|
|
|
=============
|
|
|
|
|
|
|
|
Request:
|
|
|
|
|
|
|
|
- :py:obj:`search_url`
|
|
|
|
- :py:obj:`lang_all`
|
|
|
|
- :py:obj:`soft_max_redirects`
|
2024-11-27 13:13:23 +00:00
|
|
|
- :py:obj:`method`
|
|
|
|
- :py:obj:`request_body`
|
2023-06-30 16:07:02 +00:00
|
|
|
- :py:obj:`cookies`
|
|
|
|
- :py:obj:`headers`
|
|
|
|
|
|
|
|
Paging:
|
|
|
|
|
|
|
|
- :py:obj:`paging`
|
|
|
|
- :py:obj:`page_size`
|
|
|
|
- :py:obj:`first_page_num`
|
|
|
|
|
|
|
|
Time Range:
|
|
|
|
|
|
|
|
- :py:obj:`time_range_support`
|
|
|
|
- :py:obj:`time_range_url`
|
|
|
|
- :py:obj:`time_range_map`
|
|
|
|
|
|
|
|
Safe-Search:
|
|
|
|
|
|
|
|
- :py:obj:`safe_search_support`
|
|
|
|
- :py:obj:`safe_search_map`
|
|
|
|
|
|
|
|
Response:
|
|
|
|
|
|
|
|
- :py:obj:`no_result_for_http_status`
|
|
|
|
|
|
|
|
`XPath selector`_:
|
|
|
|
|
|
|
|
- :py:obj:`results_xpath`
|
|
|
|
- :py:obj:`url_xpath`
|
|
|
|
- :py:obj:`title_xpath`
|
|
|
|
- :py:obj:`content_xpath`
|
|
|
|
- :py:obj:`thumbnail_xpath`
|
|
|
|
- :py:obj:`suggestion_xpath`
|
|
|
|
|
|
|
|
|
|
|
|
Example
|
|
|
|
=======
|
|
|
|
|
|
|
|
Here is a simple example of a XPath engine configured in the :ref:`settings
|
|
|
|
engine` section, further read :ref:`engines-dev`.
|
2021-05-23 08:56:29 +00:00
|
|
|
|
|
|
|
.. code:: yaml
|
|
|
|
|
|
|
|
- name : bitbucket
|
|
|
|
engine : xpath
|
|
|
|
paging : True
|
|
|
|
search_url : https://bitbucket.org/repo/all/{pageno}?name={query}
|
|
|
|
url_xpath : //article[@class="repo-summary"]//a[@class="repo-link"]/@href
|
|
|
|
title_xpath : //article[@class="repo-summary"]//a[@class="repo-link"]
|
|
|
|
content_xpath : //article[@class="repo-summary"]/p
|
|
|
|
|
2023-06-30 16:07:02 +00:00
|
|
|
Implementations
|
|
|
|
===============
|
|
|
|
|
2021-05-23 08:56:29 +00:00
|
|
|
"""
|
2021-01-13 10:31:25 +00:00
|
|
|
|
2020-10-02 16:13:56 +00:00
|
|
|
from urllib.parse import urlencode
|
2021-05-23 08:56:29 +00:00
|
|
|
|
|
|
|
from lxml import html
|
2020-11-26 14:49:33 +00:00
|
|
|
from searx.utils import extract_text, extract_url, eval_xpath, eval_xpath_list
|
2022-09-02 07:33:20 +00:00
|
|
|
from searx.network import raise_for_httperror
|
2013-10-26 00:22:20 +00:00
|
|
|
|
2014-01-20 01:31:20 +00:00
|
|
|
search_url = None
"""
Search URL of the engine.  Example::

    https://example.org/?search={query}&page={pageno}{time_range}{safe_search}

Replacements are:

``{query}``:
  Search terms from user.

``{pageno}``:
  Page number if engine supports paging :py:obj:`paging`

``{lang}``:
  ISO 639-1 language code (en, de, fr ..)

``{time_range}``:
  :py:obj:`URL parameter <time_range_url>` if engine :py:obj:`supports time
  range <time_range_support>`.  The value for the parameter is taken from
  :py:obj:`time_range_map`.

``{safe_search}``:
  Safe-search :py:obj:`URL parameter <safe_search_map>` if engine
  :py:obj:`supports safe-search <safe_search_support>`.  The ``{safe_search}``
  replacement is taken from the :py:obj:`safe_search_map`.  Filter results::

      0: none, 1: moderate, 2:strict

  If not supported, the URL parameter is an empty string.

"""

lang_all = 'en'
'''Replacement ``{lang}`` in :py:obj:`search_url` if language ``all`` is
selected.
'''

no_result_for_http_status = []
'''Return empty result for these HTTP status codes instead of throwing an error.

.. code:: yaml

    no_result_for_http_status: []
'''

soft_max_redirects = 0
'''Maximum redirects, soft limit. Record an error but don't stop the engine'''

results_xpath = ''
'''`XPath selector`_ for the list of result items'''

url_xpath = None
'''`XPath selector`_ of result's ``url``.'''

content_xpath = None
'''`XPath selector`_ of result's ``content``.'''

title_xpath = None
'''`XPath selector`_ of result's ``title``.'''

thumbnail_xpath = False
'''`XPath selector`_ of result's ``thumbnail``.'''

suggestion_xpath = ''
'''`XPath selector`_ of result's ``suggestion``.'''

cached_xpath = ''
'''`XPath selector`_ of a result's link to a cached copy of the page
(optional).'''

cached_url = ''
'''Prefix prepended to the text extracted by :py:obj:`cached_xpath` to build
the ``cached_url`` of a result (optional).'''

cookies = {}
'''Some engines might offer different result based on cookies.
Possible use-case: To set safesearch cookie.'''

headers = {}
'''Some engines might offer different result based headers.  Possible use-case:
To set header to moderate.'''

method = 'GET'
'''Some engines might require to do POST requests for search.'''

request_body = ''
'''The body of the request.  This can only be used if different :py:obj:`method`
is set, e.g. ``POST``.  For formatting see the documentation of
:py:obj:`search_url`::

    search={query}&page={pageno}{time_range}{safe_search}
'''

paging = False
'''Engine supports paging [True or False].'''

page_size = 1
'''Number of results on each page.  Only needed if the site requires not a page
number, but an offset.'''

first_page_num = 1
'''Number of the first page (usually 0 or 1).'''

time_range_support = False
'''Engine supports search time range.'''

time_range_url = '&hours={time_range_val}'
'''Time range URL parameter in the in :py:obj:`search_url`.  If no time range is
requested by the user, the URL parameter is an empty string.  The
``{time_range_val}`` replacement is taken from the :py:obj:`time_range_map`.

.. code:: yaml

    time_range_url : '&days={time_range_val}'
'''

time_range_map = {
    'day': 24,
    'week': 24 * 7,
    'month': 24 * 30,
    'year': 24 * 365,
}
'''Maps time range value from user to ``{time_range_val}`` in
:py:obj:`time_range_url`.

.. code:: yaml

    time_range_map:
      day: 1
      week: 7
      month: 30
      year: 365
'''

safe_search_support = False
'''Engine supports safe-search.'''

safe_search_map = {0: '&filter=none', 1: '&filter=moderate', 2: '&filter=strict'}
'''Maps safe-search value to ``{safe_search}`` in :py:obj:`search_url`.

.. code:: yaml

    safesearch: true
    safe_search_map:
      0: '&filter=none'
      1: '&filter=moderate'
      2: '&filter=strict'

'''
|
|
|
|
|
2021-05-23 08:56:29 +00:00
|
|
|
|
2021-12-27 08:26:22 +00:00
|
|
|
def request(query, params):
    '''Build request parameters (see :ref:`engine request`).

    Formats :py:obj:`search_url` (and, for non-GET engines,
    :py:obj:`request_body`) with the ``{query}``, ``{lang}``, ``{pageno}``,
    ``{time_range}`` and ``{safe_search}`` replacements and stores the result
    in ``params``.
    '''
    # ``{lang}`` replacement: use lang_all when the user selected 'all'
    lang = lang_all
    if params['language'] != 'all':
        lang = params['language'][:2]

    # ``{time_range}`` replacement: empty string when no time range was
    # requested or the requested range has no mapping.
    time_range = ''
    time_range_key = params.get('time_range')
    if time_range_key:
        time_range_val = time_range_map.get(time_range_key)
        # Guard against an unmapped key: without it, the literal string
        # "None" would be formatted into the URL parameter.
        if time_range_val is not None:
            time_range = time_range_url.format(time_range_val=time_range_val)

    # ``{safe_search}`` replacement: empty string when safesearch is 0 (off)
    safe_search = ''
    if params['safesearch']:
        safe_search = safe_search_map[params['safesearch']]

    fargs = {
        # urlencode({'q': query}) yields "q=<encoded>"; strip the leading "q="
        'query': urlencode({'q': query})[2:],
        'lang': lang,
        # translate the 1-based SearXNG page number into the engine's own
        # numbering scheme (page number or offset)
        'pageno': (params['pageno'] - 1) * page_size + first_page_num,
        'time_range': time_range,
        'safe_search': safe_search,
    }

    params['cookies'].update(cookies)
    params['headers'].update(headers)

    params['url'] = search_url.format(**fargs)
    params['method'] = method

    if request_body:
        # don't url-encode the query if it's in the request body
        fargs['query'] = query
        params['data'] = request_body.format(**fargs)

    params['soft_max_redirects'] = soft_max_redirects
    # HTTP errors are handled in response() (see no_result_for_http_status)
    params['raise_for_httperror'] = False

    return params
|
|
|
|
|
2021-05-23 08:56:29 +00:00
|
|
|
|
2022-09-02 07:33:20 +00:00
|
|
|
def response(resp):  # pylint: disable=too-many-branches
    '''Scrap *results* from the response (see :ref:`result types`).'''
    # optionally treat configured HTTP error codes as "no results" instead of
    # recording an engine error
    if no_result_for_http_status and resp.status_code in no_result_for_http_status:
        return []

    raise_for_httperror(resp)

    results = []

    # an empty body would make html.fromstring raise a ParserError
    if not resp.text:
        return results

    dom = html.fromstring(resp.text)
    is_onion = 'onions' in categories

    if results_xpath:
        # one selector per result item; url/title/content selectors are
        # evaluated relative to each item
        for result in eval_xpath_list(dom, results_xpath):

            url = extract_url(eval_xpath_list(result, url_xpath, min_len=1), search_url)
            title = extract_text(eval_xpath_list(result, title_xpath, min_len=1))
            content = extract_text(eval_xpath_list(result, content_xpath))
            tmp_result = {'url': url, 'title': title, 'content': content}

            # add thumbnail if available
            if thumbnail_xpath:
                thumbnail_xpath_result = eval_xpath_list(result, thumbnail_xpath)
                if len(thumbnail_xpath_result) > 0:
                    tmp_result['thumbnail'] = extract_url(thumbnail_xpath_result, search_url)

            # add alternative cached url if available
            if cached_xpath:
                tmp_result['cached_url'] = cached_url + extract_text(eval_xpath_list(result, cached_xpath, min_len=1))

            if is_onion:
                tmp_result['is_onion'] = True

            results.append(tmp_result)

    else:
        # no per-item selector: evaluate each field selector against the whole
        # document and zip the columns together (truncates to the shortest list)
        if cached_xpath:
            for url, title, content, cached in zip(
                (extract_url(x, search_url) for x in eval_xpath_list(dom, url_xpath)),
                map(extract_text, eval_xpath_list(dom, title_xpath)),
                map(extract_text, eval_xpath_list(dom, content_xpath)),
                map(extract_text, eval_xpath_list(dom, cached_xpath)),
            ):
                results.append(
                    {
                        'url': url,
                        'title': title,
                        'content': content,
                        'cached_url': cached_url + cached,
                        'is_onion': is_onion,
                    }
                )
        else:
            for url, title, content in zip(
                (extract_url(x, search_url) for x in eval_xpath_list(dom, url_xpath)),
                map(extract_text, eval_xpath_list(dom, title_xpath)),
                map(extract_text, eval_xpath_list(dom, content_xpath)),
            ):
                results.append({'url': url, 'title': title, 'content': content, 'is_onion': is_onion})

    # suggestions are emitted as separate result items
    if suggestion_xpath:
        for suggestion in eval_xpath(dom, suggestion_xpath):
            results.append({'suggestion': extract_text(suggestion)})

    logger.debug("found %s results", len(results))
    return results
|