
searx

My custom branch(es) on searx, a meta-search engine
commit: 1fcf066a8188b28eb644ea304a131d40b1b341eb
parent: d0830d4edf8a9ee794d5897afd813c88f0ea720b
Author: Adam Tauber <asciimoo@gmail.com>
Date:   Sun,  2 Aug 2015 19:38:27 +0200

[mod] change settings file structure according to #314

Diffstat:

  M searx/autocomplete.py      |  2 +-
  M searx/engines/__init__.py  |  2 +-
  M searx/poolrequests.py      |  8 ++++----
  M searx/settings.yml         | 26 +++++++++++++++-----------
  M searx/settings_robot.yml   | 17 ++++++++++++-----
  M searx/utils.py             |  3 ++-
  M searx/webapp.py            | 22 +++++++++++-----------
7 files changed, 46 insertions(+), 34 deletions(-)

diff --git a/searx/autocomplete.py b/searx/autocomplete.py
@@ -29,7 +29,7 @@ from searx.poolrequests import get as http_get
 
 
 def get(*args, **kwargs):
     if 'timeout' not in kwargs:
-        kwargs['timeout'] = settings['server']['request_timeout']
+        kwargs['timeout'] = settings['outgoing']['request_timeout']
 
     return http_get(*args, **kwargs)
diff --git a/searx/engines/__init__.py b/searx/engines/__init__.py
@@ -75,7 +75,7 @@ def load_engine(engine_data):
         engine.safesearch = False
 
     if not hasattr(engine, 'timeout'):
-        engine.timeout = settings['server']['request_timeout']
+        engine.timeout = settings['outgoing']['request_timeout']
 
     if not hasattr(engine, 'shortcut'):
         engine.shortcut = ''
diff --git a/searx/poolrequests.py b/searx/poolrequests.py
@@ -39,11 +39,11 @@ class HTTPAdapterWithConnParams(requests.adapters.HTTPAdapter):
             block=self._pool_block, **self._conn_params)
 
 
-if settings.get('source_ips'):
+if settings['outgoing'].get('source_ips'):
     http_adapters = cycle(HTTPAdapterWithConnParams(pool_connections=100, source_address=(source_ip, 0))
-                          for source_ip in settings['source_ips'])
+                          for source_ip in settings['outgoing']['source_ips'])
     https_adapters = cycle(HTTPAdapterWithConnParams(pool_connections=100, source_address=(source_ip, 0))
-                           for source_ip in settings['source_ips'])
+                           for source_ip in settings['outgoing']['source_ips'])
 else:
     http_adapters = cycle((HTTPAdapterWithConnParams(pool_connections=100), ))
     https_adapters = cycle((HTTPAdapterWithConnParams(pool_connections=100), ))
@@ -69,7 +69,7 @@ def request(method, url, **kwargs):
     """same as requests/requests/api.py request(...) except it use SessionSinglePool and force proxies"""
     global settings
     session = SessionSinglePool()
-    kwargs['proxies'] = settings.get('outgoing_proxies', None)
+    kwargs['proxies'] = settings['outgoing'].get('proxies', None)
     response = session.request(method=method, url=url, **kwargs)
     session.close()
     return response
diff --git a/searx/settings.yml b/searx/settings.yml
@@ -1,28 +1,32 @@
+general:
+    debug : False # Debug mode, only for development
+
 server:
     port : 8888
     bind_address : "127.0.0.1" # address to listen on
     secret_key : "ultrasecretkey" # change this!
-    debug : False # Debug mode, only for development
-    request_timeout : 2.0 # seconds
     base_url : False # Set custom base_url. Possible values: False or "https://your.custom.host/location/"
+    image_proxy : False # Proxying image results through searx
+
+ui:
     themes_path : "" # Custom ui themes path - leave it blank if you didn't change
     default_theme : oscar # ui theme
-    useragent_suffix : "" # suffix of searx_useragent, could contain informations like an email address to the administrator
-    image_proxy : False # Proxying image results through searx
     default_locale : "" # Default interface locale - leave blank to detect from browser information or use codes from the 'locales' config section
 
+outgoing: # communication with search engines
+    request_timeout : 2.0 # seconds
+    useragent_suffix : "" # suffix of searx_useragent, could contain informations like an email address to the administrator
 # uncomment below section if you want to use a proxy
 # see http://docs.python-requests.org/en/latest/user/advanced/#proxies
 # SOCKS proxies are not supported : see https://github.com/kennethreitz/requests/pull/478
-#outgoing_proxies :
-#    http : http://127.0.0.1:8080
-#    https: http://127.0.0.1:8080
-
+#    proxies :
+#        http : http://127.0.0.1:8080
+#        https: http://127.0.0.1:8080
 # uncomment below section only if you have more than one network interface
 # which can be the source of outgoing search requests
-#source_ips:
-#  - 1.1.1.1
-#  - 1.1.1.2
+#    source_ips:
+#        - 1.1.1.1
+#        - 1.1.1.2
 
 engines:
   - name : wikipedia
diff --git a/searx/settings_robot.yml b/searx/settings_robot.yml
@@ -1,14 +1,21 @@
+general:
+    debug : False
+
 server:
     port : 11111
     bind_address : 127.0.0.1
     secret_key : "ultrasecretkey" # change this!
-    debug : False
-    request_timeout : 3.0 # seconds
-    base_url: False
+    base_url : False
+    image_proxy : False
+
+ui:
     themes_path : ""
     default_theme : default
-    https_rewrite : True
-    image_proxy : False
+    default_locale : ""
+
+outgoing:
+    request_timeout : 1.0 # seconds
+    useragent_suffix : ""
 
 engines:
   - name : general_dummy
diff --git a/searx/utils.py b/searx/utils.py
@@ -26,6 +26,7 @@ ua_versions = ('33.0',
 ua_os = ('Windows NT 6.3; WOW64',
          'X11; Linux x86_64',
          'X11; Linux x86')
+
 ua = "Mozilla/5.0 ({os}; rv:{version}) Gecko/20100101 Firefox/{version}"
 
 blocked_tags = ('script',
@@ -40,7 +41,7 @@ def gen_useragent():
 def searx_useragent():
     return 'searx/{searx_version} {suffix}'.format(
         searx_version=VERSION_STRING,
-        suffix=settings['server'].get('useragent_suffix', ''))
+        suffix=settings['outgoing'].get('useragent_suffix', ''))
 
 
 def highlight_content(content, query):
diff --git a/searx/webapp.py b/searx/webapp.py
@@ -77,11 +77,11 @@ except ImportError:
 
 
 static_path, templates_path, themes =\
-    get_themes(settings['themes_path']
-               if settings.get('themes_path')
+    get_themes(settings['ui']['themes_path']
+               if settings['ui']['themes_path']
                else searx_dir)
 
-default_theme = settings['server'].get('default_theme', 'default')
+default_theme = settings['ui']['default_theme']
 
 static_files = get_static_files(searx_dir)
 
@@ -121,15 +121,15 @@ _category_names = (gettext('files'),
                    gettext('news'),
                    gettext('map'))
 
-outgoing_proxies = settings.get('outgoing_proxies', None)
+outgoing_proxies = settings['outgoing'].get('proxies', None)
 
 
 @babel.localeselector
 def get_locale():
     locale = request.accept_languages.best_match(settings['locales'].keys())
 
-    if settings['server'].get('default_locale'):
-        locale = settings['server']['default_locale']
+    if settings['ui'].get('default_locale'):
+        locale = settings['ui']['default_locale']
 
     if request.cookies.get('locale', '') in settings['locales']:
         locale = request.cookies.get('locale', '')
@@ -640,12 +640,12 @@ def preferences():
             stats[e.name] = {'time': None,
                              'warn_timeout': False,
                              'warn_time': False}
-            if e.timeout > settings['server']['request_timeout']:
+            if e.timeout > settings['outgoing']['request_timeout']:
                 stats[e.name]['warn_timeout'] = True
 
         for engine_stat in get_engines_stats()[0][1]:
             stats[engine_stat.get('name')]['time'] = round(engine_stat.get('avg'), 3)
-            if engine_stat.get('avg') > settings['server']['request_timeout']:
+            if engine_stat.get('avg') > settings['outgoing']['request_timeout']:
                 stats[engine_stat.get('name')]['warn_time'] = True
     # end of stats
 
@@ -683,7 +683,7 @@ def image_proxy():
 
     resp = requests.get(url,
                         stream=True,
-                        timeout=settings['server'].get('request_timeout', 2),
+                        timeout=settings['outgoing']['request_timeout'],
                         headers=headers,
                         proxies=outgoing_proxies)
 
@@ -775,8 +775,8 @@ def clear_cookies():
 
 def run():
     app.run(
-        debug=settings['server']['debug'],
-        use_debugger=settings['server']['debug'],
+        debug=settings['general']['debug'],
+        use_debugger=settings['general']['debug'],
         port=settings['server']['port'],
         host=settings['server']['bind_address']
    )
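
For reference, below is a minimal sketch (not part of the commit) of how code reads the reorganized settings after this change: debug moves under "general", interface options under "ui", and engine-communication options under "outgoing". The embedded YAML snippet and values are illustrative only, trimmed from the new settings.yml layout shown in the diff above.

# Hypothetical illustration of the new settings layout from this commit.
import yaml

EXAMPLE_SETTINGS = """
general:
    debug : False
server:
    port : 8888
    bind_address : "127.0.0.1"
ui:
    default_theme : oscar
    default_locale : ""
outgoing:
    request_timeout : 2.0
    useragent_suffix : ""
"""

settings = yaml.safe_load(EXAMPLE_SETTINGS)

# Lookups that used to go through settings['server'] now resolve under the
# new sections, as in autocomplete.py, utils.py and webapp.py above.
assert settings['outgoing']['request_timeout'] == 2.0
assert settings['ui']['default_theme'] == 'oscar'
assert settings['general']['debug'] is False
print(settings['outgoing'].get('proxies'))  # None unless a 'proxies' block is configured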