from json import loads
from urllib import urlencode
from searx.utils import html_to_text

# engine base URL and the d.js search-string format
url = 'https://duckduckgo.com/'
search_url = url + 'd.js?{query}&p=1&s={offset}'

# interface locale sent with every query
locale = 'us-en'

# this engine supports result paging
paging = True


def request(query, params):
    # d.js serves 30 results per page; convert searx's 1-based page number
    # into a result offset
    offset = (params['pageno'] - 1) * 30
    q = urlencode({'q': query,
                   'l': locale})
    params['url'] = search_url.format(query=q, offset=offset)
    return params


def response(resp):
    results = []

    # the body is not pure JSON: slice out the array between the first
    # '[{' and the two trailing characters, then drop the last entry
    search_res = loads(resp.text[resp.text.find('[{'):-2])[:-1]

    for r in search_res:
        # skip entries without a title
        if not r.get('t'):
            continue
        results.append({'title': r['t'],
                        'content': html_to_text(r['a']),
                        'url': r['u']})

    return results
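

if __name__ == '__main__':
    # Minimal usage sketch, not part of the searx engine interface: it
    # assumes the `requests` package is installed and that DuckDuckGo's
    # d.js endpoint still answers in the format parsed above.
    import requests

    example = request('free software', {'pageno': 1})
    live_resp = requests.get(example['url'])
    for result in response(live_resp):
        print result['title'], '-', result['url']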