
searx

My custom branch(es) on searx, a meta-search engine
commit: 1f0e6ce65ba5e255f423ad8ec6967d67fd4aec0b
parent: 952473d297b2f0131196086f5824ae48f32d2922
Author: Adam Tauber <asciimoo@gmail.com>
Date:   Sun, 26 Apr 2015 12:19:15 -0400

Merge pull request #297 from dalf/proxies

[enh] Implement http proxies for outgoing requests. (see #236)

Diffstat:

M searx/poolrequests.py | 4 +++-
M searx/settings.yml    | 7 +++++++
M searx/webapp.py       | 5 ++++-
3 files changed, 14 insertions(+), 2 deletions(-)

diff --git a/searx/poolrequests.py b/searx/poolrequests.py
@@ -66,8 +66,10 @@ class SessionSinglePool(requests.Session):
 
 
 def request(method, url, **kwargs):
-    """same as requests/requests/api.py request(...) except it use SessionSinglePool"""
+    """same as requests/requests/api.py request(...) except it use SessionSinglePool and force proxies"""
+    global settings
     session = SessionSinglePool()
+    kwargs['proxies'] = settings.get('outgoing_proxies', None)
     response = session.request(method=method, url=url, **kwargs)
     session.close()
     return response
diff --git a/searx/settings.yml b/searx/settings.yml
@@ -10,6 +10,13 @@ server:
     image_proxy : False # Proxying image results through searx
     default_locale : "" # Default interface locale - leave blank to detect from browser information or use codes from the 'locales' config section
 
+# uncomment below section if you want to use a proxy
+# see http://docs.python-requests.org/en/latest/user/advanced/#proxies
+# SOCKS proxies are not supported : see https://github.com/kennethreitz/requests/pull/478
+#outgoing_proxies :
+#    http : http://127.0.0.1:8080
+#    https: http://127.0.0.1:8080
+
 # uncomment below section only if you have more than one network interface
 # which can be the source of outgoing search requests
 #source_ips:
diff --git a/searx/webapp.py b/searx/webapp.py
@@ -111,6 +111,8 @@ _category_names = (gettext('files'),
                    gettext('news'),
                    gettext('map'))
 
+outgoing_proxies = settings.get('outgoing_proxies', None)
+
 
 @babel.localeselector
 def get_locale():
@@ -645,7 +647,8 @@ def image_proxy():
     resp = requests.get(url,
                         stream=True,
                         timeout=settings['server'].get('request_timeout', 2),
-                        headers=headers)
+                        headers=headers,
+                        proxies=outgoing_proxies)
 
     if resp.status_code == 304:
         return '', resp.status_code
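
How the new setting is consumed: requests accepts a per-scheme proxies mapping, and the patch injects the 'outgoing_proxies' value from settings.yml into every outgoing call. A minimal sketch of the pattern follows; the settings dict and proxied_get helper below are hypothetical stand-ins for illustration, not part of the commit.

    import requests

    # Stand-in for searx's loaded configuration, mirroring the
    # commented-out example added to settings.yml.
    settings = {
        'outgoing_proxies': {
            'http': 'http://127.0.0.1:8080',
            'https': 'http://127.0.0.1:8080',
        }
    }

    def proxied_get(url, **kwargs):
        # Same pattern as the patched request() in searx/poolrequests.py:
        # force the configured proxies onto every outgoing request.
        kwargs['proxies'] = settings.get('outgoing_proxies', None)
        return requests.get(url, **kwargs)

    if __name__ == '__main__':
        resp = proxied_get('http://example.com/', timeout=2)
        print(resp.status_code)

If 'outgoing_proxies' is left commented out, settings.get() returns None and requests falls back to a direct connection, so the default behaviour is unchanged.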