Mirror of https://github.com/bookwyrm-social/bookwyrm.git, synced 2024-11-23 01:51:08 +00:00
Move the search request logic into the AbstractConnector to allow for more flexibility
Signed-off-by: Jacob Torrey <jacob@jacobtorrey.com>
parent d35b1d91ba
commit 797d339132
2 changed files with 38 additions and 36 deletions
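
The change in a nutshell: the module-level get_results() helper moves out of connector_manager.py and becomes a method on AbstractMinimalConnector, so each connector class can override the request logic. At the call site, the connector stops being a parameter and becomes the dispatch target:

    # before: free function, connector passed in as an argument
    get_results(session, url, min_confidence, query, connector)

    # after: method resolved through the connector instance
    connector.get_results(session, url, min_confidence, query)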
bookwyrm/connectors/abstract_connector.py

@@ -4,6 +4,8 @@ from urllib.parse import quote_plus
 import imghdr
 import logging
 import re
+import aiohttp
+import asyncio
 
 from django.core.files.base import ContentFile
 from django.db import transaction
@@ -11,6 +13,7 @@ import requests
 from requests.exceptions import RequestException
 
 from bookwyrm import activitypub, models, settings
+from bookwyrm.settings import SEARCH_TIMEOUT, USER_AGENT
 from .connector_manager import load_more_data, ConnectorException, raise_not_valid_url
 from .format_mappings import format_mappings
 
@@ -57,6 +60,40 @@ class AbstractMinimalConnector(ABC):
             return list(self.parse_isbn_search_data(data))[:10]
         return list(self.parse_search_data(data, min_confidence))[:10]
 
+    async def get_results(self, session, url, min_confidence, query):
+        """try this specific connector"""
+        # pylint: disable=line-too-long
+        headers = {
+            "Accept": (
+                'application/json, application/activity+json, application/ld+json; profile="https://www.w3.org/ns/activitystreams"; charset=utf-8'
+            ),
+            "User-Agent": USER_AGENT,
+        }
+        params = {"min_confidence": min_confidence}
+        try:
+            async with session.get(url, headers=headers, params=params) as response:
+                if not response.ok:
+                    logger.info("Unable to connect to %s: %s", url, response.reason)
+                    return
+
+                try:
+                    raw_data = await response.json()
+                except aiohttp.client_exceptions.ContentTypeError as err:
+                    logger.exception(err)
+                    return
+
+                return {
+                    "connector": self,
+                    "results": self.process_search_response(
+                        query, raw_data, min_confidence
+                    ),
+                }
+        except asyncio.TimeoutError:
+            logger.info("Connection timed out for url: %s", url)
+        except aiohttp.ClientError as err:
+            logger.info(err)
+
     @abstractmethod
     def get_or_create_book(self, remote_id):
         """pull up a book record by whatever means possible"""
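Since get_results() is now an overridable method, a subclass can wrap or replace the shared request logic. A minimal sketch of the flexibility this enables; the ExampleConnector name and its retry policy are hypothetical, not part of this commit:

    class ExampleConnector(AbstractMinimalConnector):
        """hypothetical connector, shown only to illustrate overriding"""

        async def get_results(self, session, url, min_confidence, query):
            # the base implementation returns None on timeouts and bad
            # responses, so retry once before giving up (assumed policy)
            found = await super().get_results(session, url, min_confidence, query)
            if found is None:
                found = await super().get_results(session, url, min_confidence, query)
            return found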
bookwyrm/connectors/connector_manager.py

@@ -21,41 +21,6 @@ logger = logging.getLogger(__name__)
 class ConnectorException(HTTPError):
     """when the connector can't do what was asked"""
 
 
-async def get_results(session, url, min_confidence, query, connector):
-    """try this specific connector"""
-    # pylint: disable=line-too-long
-    headers = {
-        "Accept": (
-            'application/json, application/activity+json, application/ld+json; profile="https://www.w3.org/ns/activitystreams"; charset=utf-8'
-        ),
-        "User-Agent": USER_AGENT,
-    }
-    params = {"min_confidence": min_confidence}
-    try:
-        async with session.get(url, headers=headers, params=params) as response:
-            if not response.ok:
-                logger.info("Unable to connect to %s: %s", url, response.reason)
-                return
-
-            try:
-                raw_data = await response.json()
-            except aiohttp.client_exceptions.ContentTypeError as err:
-                logger.exception(err)
-                return
-
-            return {
-                "connector": connector,
-                "results": connector.process_search_response(
-                    query, raw_data, min_confidence
-                ),
-            }
-    except asyncio.TimeoutError:
-        logger.info("Connection timed out for url: %s", url)
-    except aiohttp.ClientError as err:
-        logger.info(err)
-
-
 async def async_connector_search(query, items, min_confidence):
     """Try a number of requests simultaneously"""
     timeout = aiohttp.ClientTimeout(total=SEARCH_TIMEOUT)
@@ -64,7 +29,7 @@ async def async_connector_search(query, items, min_confidence):
     for url, connector in items:
         tasks.append(
             asyncio.ensure_future(
-                get_results(session, url, min_confidence, query, connector)
+                connector.get_results(session, url, min_confidence, query)
             )
         )
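For context, this hunk shows only the task-building loop; async_connector_search opens one shared aiohttp.ClientSession and awaits all connectors concurrently. A rough sketch of the enclosing shape, with the gather-and-filter step paraphrased rather than quoted from this diff:

    async with aiohttp.ClientSession(timeout=timeout) as session:
        tasks = []
        for url, connector in items:
            tasks.append(
                asyncio.ensure_future(
                    connector.get_results(session, url, min_confidence, query)
                )
            )
        results = await asyncio.gather(*tasks)
        # get_results returns None on failure, so keep only real hits
        return [r for r in results if r]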