mirror of https://github.com/searx/searx
Merge branch 'searx:master' into master
This commit is contained in commit 41afee9a67

.pylintrc | 44
@@ -59,7 +59,7 @@ confidence=
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W"
disable=bad-whitespace, duplicate-code, consider-using-f-string
disable=duplicate-code, consider-using-f-string

# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option

@@ -105,39 +105,18 @@ max-nested-blocks=5
[BASIC]

# List of builtins function names that should not be used, separated by a comma
bad-functions=map,filter,apply,input

# Naming hint for argument names
argument-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$

# Regular expression matching correct argument names
argument-rgx=(([a-z][a-zA-Z0-9_]{2,30})|(_[a-z0-9_]*))$

# Naming hint for attribute names
attr-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$

# Regular expression matching correct attribute names
attr-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*)|([A-Z0-9_]*))$

# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata

# Naming hint for class attribute names
class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$

# Regular expression matching correct class attribute names
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$

# Naming hint for class names
class-name-hint=[A-Z_][a-zA-Z0-9]+$

# Regular expression matching correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$

# Naming hint for constant names
const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$

# Regular expression matching correct constant names
const-rgx=(([a-zA-Z_][a-zA-Z0-9_]*)|(__.*__))$
#const-rgx=[f]?[A-Z_][a-zA-Z0-9_]{2,30}$

@@ -146,9 +125,6 @@ const-rgx=(([a-zA-Z_][a-zA-Z0-9_]*)|(__.*__))$
# ones are exempt.
docstring-min-length=-1

# Naming hint for function names
function-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$

# Regular expression matching correct function names
function-rgx=(([a-z][a-zA-Z0-9_]{2,30})|(_[a-z0-9_]*))$

@@ -158,21 +134,12 @@ good-names=i,j,k,ex,Run,_,log,cfg,id
# Include a hint for the correct naming format with invalid-name
include-naming-hint=no

# Naming hint for inline iteration names
inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$

# Regular expression matching correct inline iteration names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$

# Naming hint for method names
method-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$

# Regular expression matching correct method names
method-rgx=(([a-z][a-zA-Z0-9_]{2,30})|(_[a-z0-9_]*))$

# Naming hint for module names
module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$

# Regular expression matching correct module names
#module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
module-rgx=([a-z_][a-z0-9_]*)$

@@ -189,9 +156,6 @@ no-docstring-rgx=^_
# to this list to register other decorators that produce valid properties.
property-classes=abc.abstractproperty

# Naming hint for variable names
variable-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$

# Regular expression matching correct variable names
variable-rgx=(([a-z][a-zA-Z0-9_]{2,30})|(_[a-z0-9_]*)|([a-z]))$

@@ -217,12 +181,6 @@ max-line-length=120
# Maximum number of lines in a module
max-module-lines=2000

# List of optional constructs for which whitespace checking is disabled. `dict-
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
# `empty-line` allows space-only lines.
no-space-check=trailing-comma,dict-separator

# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
AUTHORS.rst | 27

@@ -166,3 +166,30 @@ generally made searx better:
- @xenrox
- @OliveiraHermogenes
- Paul Alcock @Guilvareux
- Ben Collerson
- @3nprob
- @plague-doctor
- @CicadaCinema
- @mikamp116
- @Zackptg5
- @darkmagic13
- @CrocodileCroco
- @allendema
- Jordan Webb @jordemort
- Samuel Dudik @dudik
- @c1492
- @nav1s
- Igor Rzegocki @ajgon
- Dmitrii Faiazov @scientia-ac-labore
- @noctux
- @jecarr
- @israelyago
- Georg @tacerus
- Dario Nuevo @narcoticfresh
- Andy Jones @andyljones
- Maciej Urbański @rooterkyberian
- @ilyakooo0
- Eric Zhang @EricZhang456
- @nathannaveen
- @liimee
- @james-still
101
CHANGELOG.rst
101
CHANGELOG.rst
|
CHANGELOG.rst | 101

@@ -1,3 +1,104 @@
1.1.0 2022.08.07
================

It has been a while since we released a new version of searx. Thus, we have lots of new things to offer, like new engines, autocompleters, plugins, etc. We got numerous contributions from ~30 new developers, but we also got many PRs from our recurring contributors.

Thank you so much for your support! We couldn't have released so many awesome things without you!

Core
~~~~

- Drop Python 3.6 support #3133
- Run tests under python 3.10 #3035
- Reduce redundant docker build steps #2725
- Allow overriding Docker repository when building docker image #2726
- Add healthcheck endpoint for Docker #2992

New engines
~~~~~~~~~~~

- Wordnik.com #2735
- Bandcamp #2763
- SJP - Słownik języka polskiego #2736
- Wikimini #2819
- Dogpile #2822
- PyPI XPATH engine #2830
- ManKier #2829
- Kaufland.de #2915
- ask.com #2898
- gpodder.net (JSON) #2885
- woxikon.de synonyme (xpath) #2883
- Petalsearch.com engine (xpath) #2897
- whaleslide.com #2861
- azlyrics.com #2955
- IMDB #2980
- Prowlarr #3118
- Tineye reverse image search #3040
- Onesearch #3065
- TVmaze #3246
- Emojipedia #3278
- Psychonautwiki by @dimethyltriptamine @kvch

Fixed engines
~~~~~~~~~~~~~

- Remove HubSpot tracking URL params #2723
- Fix URL to solidtorrent result page #2786
- Update onion engines to v3 #2904
- Fix Seznam engine #2905
- Add pagination support for Library Genesis #2887
- Fix uppercase ip query #2991
- Fix Libgen + Uncomment Ebay and Urbandictionary #2986
- Fixed Hoogle engine #3146
- Fix Digg engine #3150
- Don't lump all search suggestions together in Yahoo #3208
- Fix DDG safe search #3247
- Fix Qwant: Remove extra q from URL #3091

New plugins
~~~~~~~~~~~

- hostname_replace plugin to rewrite result hostnames #2724
- search_operators plugin to filter search results using -, site: and -site: #3311

Fixed plugins
~~~~~~~~~~~~~

- Fix default_doi_resolver in preferences #2707
- Add DOI resolver from sci-hub and replace default DOI #2706

Themes
~~~~~~

- Fix dark "expand" button from infobox #2702
- fix url_for(..., _external=True) in templates #2656
- [enh] oscar: image thumbnail layout #2675
- Improve text overflow of custom select #2985
- Products results: add possibility to show if a product is in stock or not #3120
- Configurable autofocus of search input (#1984) #3285
- archive.today option for results page #3308
- Fix keyboard hints for category tabs (#1187) #3276

Enhancements
~~~~~~~~~~~~

- Allow overriding env vars SEARX_SETTINGS_PATH, UWSGI_SETTINGS_PATH #2717
- correct typo/grammatical mistake #2744
- Fix bug for 'FileNotFoundError' in 'standalone_searx.py' #2764
- Fix grammar mistake in debug log output #2759
- Fix typo #2768
- Fix redirect when saving preferences #2760
- Replace Makefile boilerplate by shell scripts #2594
- Fix Qwant's fetch_languages function #2799
- Apply HTTPS where possible + fix small typo #2922
- Change in user-agent Firefox versions to latest #3008
- Use engine-type when looking up supported_languages from JSON files #3002
- Update about section of Invidious and Rumble + Change filtron error wording #2959
- Verify that Tor proxy works every time searx starts #3015
- Update settings_loader.get_user_settings_path() #3056
- Fix wrong func call #3058
- Improve ranking based on language #3053

1.0.0 2021.03.27
================

README.rst | 64
@@ -66,8 +66,66 @@ contact:
.. _openhub: https://www.openhub.net/p/searx
.. _twitter: https://twitter.com/Searx_engine

-------

**************************
Frequently asked questions
**************************

Is searx in maintenance mode?
#############################

|gluten free|

.. |gluten free| image:: https://forthebadge.com/images/featured/featured-gluten-free.svg

No, searx is accepting new features, including new engines. We also add
engine fixes and other bug fixes when needed. Keep in mind that searx is
maintained by volunteers in their free time, so some changes might take
a while to be merged.

We reject features that might violate the privacy of users. If you really want
such a feature, it must be disabled by default and warn users about the
consequences of turning it on.

What is the difference between searx and SearxNG?
#################################################

TL;DR: If you want to run a public instance, go with SearxNG. If you want to
self-host your own instance, choose searx.

SearxNG is a fork of searx, created by a former maintainer of searx. The fork
was created because the majority of the maintainers at the time did not find
the newly proposed features privacy-respecting enough. The most significant
issue is with engine metrics.

Searx is built for privacy-conscious users. It comes with a unique set of
challenges. One of the problems we face is that users would rather not report
bugs, because they do not want to publicly share what engines they use or what
search query triggered a problem. It is a challenge we accepted.

The new metrics feature collects more information to make engine maintenance
easier. With it we could have had better and more numerous error reports to
benefit searx maintainers. However, we believe that the users of searx must
come first, not the software. We are willing to compromise on the lack of
issue reports to avoid violating the privacy of users.

Furthermore, SearxNG is under heavy refactoring and its dependencies are
constantly updated, even when unnecessary. This increases the risk of
introducing regressions. In searx we strive for stability, rather than moving
fast and breaking things.

Is searx for me?
################

Are you a privacy-conscious user? Then yes.

In searx we decided to double down on being privacy-respecting. We are picking
engine changes from SearxNG, but we are not implementing detailed engine
monitoring and not adding a new UI that relies on JavaScript.

If you are willing to give up some privacy-respecting features, we encourage
you to adopt SearxNG. Searx is targeted at privacy-conscious users who run
their instances locally, instead of using public instances.

Why should I use SearxNG?
#########################

SearxNG has rolling releases, dependencies are updated more frequently, and
engines are fixed faster. It is easy to set up your own public instance and
monitor its performance and metrics. It is simple to maintain as an instance
administrator.

As a user, it provides a prettier user interface and a nicer experience.
@@ -130,12 +130,14 @@ Global Settings
   request_timeout : 2.0       # default timeout in seconds, can be overridden by engine
   # max_request_timeout: 10.0 # the maximum timeout in seconds
   useragent_suffix : ""       # information like an email address of the administrator
   pool_connections : 100      # Maximum number of allowable connections, or None for no limits. The default is 100.
   pool_maxsize : 10           # Number of allowable keep-alive connections, or None to always allow. The default is 10.
   enable_http2: True          # See https://www.python-httpx.org/http2/
   pool_connections : 100      # Number of different hosts
   pool_maxsize : 10           # Number of simultaneous requests by host
   # uncomment below section if you want to use a proxy
   # proxies:
   #   all://:
   #   http:
   #     - http://proxy1:8080
   #     - http://proxy2:8080
   #   https:
   #     - http://proxy1:8080
   #     - http://proxy2:8080
   # uncomment below section only if you have more than one network interface

@@ -143,7 +145,6 @@ Global Settings
   # source_ips:
   #   - 1.1.1.1
   #   - 1.1.1.2
   #   - fe80::/126


``request_timeout`` :

@@ -156,46 +157,20 @@ Global Settings
  Suffix to the user-agent searx uses to send requests to other engines. If an
  engine wishes to block you, a contact info here may be useful to avoid that.

``keepalive_expiry`` :
  Number of seconds to keep a connection in the pool. By default 5.0 seconds.

.. _httpx proxies: https://www.python-httpx.org/advanced/#http-proxying
.. _requests proxies: https://requests.readthedocs.io/en/latest/user/advanced/#proxies
.. _PySocks: https://pypi.org/project/PySocks/

``proxies`` :
  Define one or more proxies you wish to use, see `httpx proxies`_.
  Define one or more proxies you wish to use, see `requests proxies`_.
  If there is more than one proxy for one protocol (http, https),
  requests to the engines are distributed in a round-robin fashion.

  - Proxy: `see <https://2.python-requests.org/en/latest/user/advanced/#proxies>`__.
  - SOCKS proxies are also supported: `see <https://2.python-requests.org/en/latest/user/advanced/#socks>`__

``source_ips`` :
  If you use multiple network interfaces, define from which IP the requests must
  be made. Example:

  * ``0.0.0.0`` any local IPv4 address.
  * ``::`` any local IPv6 address.
  * ``192.168.0.1``
  * ``[ 192.168.0.1, 192.168.0.2 ]`` these two specific IP addresses
  * ``fe80::60a2:1691:e5a2:ee1f``
  * ``fe80::60a2:1691:e5a2:ee1f/126`` all IP addresses in this network.
  * ``[ 192.168.0.1, fe80::/126 ]``

``retries`` :
  Number of retries in case of an HTTP error.
  On each retry, searx uses a different proxy and source IP.

``retry_on_http_error`` :
  Retry the request on certain HTTP status codes.

  Example:

  * ``true`` : on HTTP status codes between 400 and 599.
  * ``403`` : on HTTP status code 403.
  * ``[403, 429]`` : on HTTP status codes 403 and 429.

``enable_http2`` :
  Enabled by default. Set to ``False`` to disable HTTP/2.

``max_redirects`` :
  30 by default. Maximum number of redirects before it is an error.
  be made. This parameter is ignored when ``proxies`` is set.


``locales:``

@@ -241,13 +216,6 @@ Engine settings
    api_key : 'apikey'
    disabled : True
    language : en_US
    #enable_http: False
    #enable_http2: False
    #retries: 1
    #retry_on_http_error: True # or 403 or [404, 429]
    #max_connections: 100
    #max_keepalive_connections: 10
    #keepalive_expiry: 5.0
    #proxies:
    #   http:
    #     - http://proxy1:8080

@@ -302,12 +270,6 @@ Engine settings
``display_error_messages`` : default ``True``
  When an engine returns an error, the message is displayed on the user interface.

``network`` : optional
  Use the network configuration from another engine.
  In addition, there are two default networks:

  * ``ipv4`` set ``local_addresses`` to ``0.0.0.0`` (use only IPv4 local addresses)
  * ``ipv6`` set ``local_addresses`` to ``::`` (use only IPv6 local addresses)

.. note::

   A few more options are possible, but they are pretty specific to some

docs/conf.py | 32
@@ -10,7 +10,7 @@ from searx.version import VERSION_STRING
# Project --------------------------------------------------------------

project = u'searx'
copyright = u'2015-2021, Adam Tauber, Noémi Ványi'
copyright = u'2015-2022, Adam Tauber, Noémi Ványi'
author = u'Adam Tauber'
release, version = VERSION_STRING, VERSION_STRING
highlight_language = 'none'

@@ -38,26 +38,26 @@ jinja_contexts = {
extlinks = {}

# upstream links
extlinks['wiki'] = ('https://github.com/searx/searx/wiki/%s', ' ')
extlinks['pull'] = ('https://github.com/searx/searx/pull/%s', 'PR ')
extlinks['wiki'] = ('https://github.com/searx/searx/wiki/%s', '%s')
extlinks['pull'] = ('https://github.com/searx/searx/pull/%s', 'PR %s')

# links to custom brand
extlinks['origin'] = (brand.GIT_URL + '/blob/' + brand.GIT_BRANCH + '/%s', 'git://')
extlinks['patch'] = (brand.GIT_URL + '/commit/%s', '#')
extlinks['search'] = (brand.SEARX_URL + '/%s', '#')
extlinks['docs'] = (brand.DOCS_URL + '/%s', 'docs: ')
extlinks['pypi'] = ('https://pypi.org/project/%s', 'PyPi: ')
extlinks['man'] = ('https://manpages.debian.org/jump?q=%s', '')
extlinks['origin'] = (brand.GIT_URL + '/blob/' + brand.GIT_BRANCH + '/%s', 'Origin: %s')
extlinks['patch'] = (brand.GIT_URL + '/commit/%s', 'patch %s')
extlinks['search'] = (brand.SEARX_URL + '/%s', 'URL: %s')
extlinks['docs'] = (brand.DOCS_URL + '/%s', 'docs: %s')
extlinks['pypi'] = ('https://pypi.org/project/%s', 'PyPi: %s')
extlinks['man'] = ('https://manpages.debian.org/jump?q=%s', 'man: %s')
#extlinks['role'] = (
#    'https://www.sphinx-doc.org/en/master/usage/restructuredtext/roles.html#role-%s', '')
extlinks['duref'] = (
    'https://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html#%s', '')
    'https://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html#%s', '%s')
extlinks['durole'] = (
    'https://docutils.sourceforge.net/docs/ref/rst/roles.html#%s', '')
    'https://docutils.sourceforge.net/docs/ref/rst/roles.html#%s', '%s')
extlinks['dudir'] = (
    'https://docutils.sourceforge.net/docs/ref/rst/directives.html#%s', '')
    'https://docutils.sourceforge.net/docs/ref/rst/directives.html#%s', '%s')
extlinks['ctan'] = (
    'https://ctan.org/pkg/%s', 'CTAN: ')
    'https://ctan.org/pkg/%s', 'CTAN: %s')

extensions = [
    'sphinx.ext.imgmath',

@@ -67,7 +67,7 @@ extensions = [
    "sphinx.ext.intersphinx",
    "pallets_sphinx_themes",
    "sphinx_issues",  # https://github.com/sloria/sphinx-issues/blob/master/README.rst
    "sphinxcontrib.jinja",  # https://github.com/tardyp/sphinx-jinja
    "sphinx_jinja",  # https://github.com/tardyp/sphinx-jinja
    "sphinxcontrib.programoutput",  # https://github.com/NextThought/sphinxcontrib-programoutput
    'linuxdoc.kernel_include',  # Implementation of the 'kernel-include' reST-directive.
    'linuxdoc.rstFlatTable',  # Implementation of the 'flat-table' reST-directive.

@@ -101,13 +101,11 @@ imgmath_font_size = 14

html_theme_options = {"index_sidebar_logo": True}
html_context = {"project_links": [] }
html_context["project_links"].append(ProjectLink("Blog", "blog/index.html"))
html_context["project_links"].append(ProjectLink("Blog", brand.DOCS_URL + "/blog/index.html"))
if brand.GIT_URL:
    html_context["project_links"].append(ProjectLink("Source", brand.GIT_URL))
if brand.WIKI_URL:
    html_context["project_links"].append(ProjectLink("Wiki", brand.WIKI_URL))
if brand.PUBLIC_INSTANCES:
    html_context["project_links"].append(ProjectLink("Public instances", brand.PUBLIC_INSTANCES))
if brand.TWITTER_URL:
    html_context["project_links"].append(ProjectLink("Twitter", brand.TWITTER_URL))
if brand.ISSUE_URL:
@@ -44,7 +44,7 @@ categories              list        pages, in which the engine is working
paging                  boolean     support multiple pages
time_range_support      boolean     support search time range
engine_type             str         ``online`` by default, other possible values are
                                    ``offline``, ``online_dictionnary``, ``online_currency``
                                    ``offline``, ``online_dictionary``, ``online_currency``
======================= =========== ========================================================

.. _engine settings:

@@ -132,7 +132,7 @@ language               str            specific language code like ``'en_US'``, o
====================== ============== ========================================================================

If the ``engine_type`` is ``online_dictionnary``, in addition to the ``online`` arguments:
If the ``engine_type`` is ``online_dictionary``, in addition to the ``online`` arguments:

====================== ============ ========================================================================
argument               type         default-value, information
@@ -8,9 +8,6 @@ Searx is a free internet metasearch engine which aggregates results from more
than 70 search services. Users are neither tracked nor profiled. Additionally,
searx can be used over Tor for online anonymity.

Get started with searx by using one of the Searx-instances_. If you don't trust
anyone, you can set up your own, see :ref:`installation`.

.. sidebar:: Features

   - Self hosted

@@ -33,5 +30,3 @@ anyone, you can set up your own, see :ref:`installation`.
   searx_extra/index
   utils/index
   blog/index

.. _Searx-instances: https://searx.space

manage | 7
@@ -107,8 +107,7 @@ fi
export DOCS_BUILD

buildenv() {
    SEARX_DEBUG=1 pyenv.cmd python utils/build_env.py 2>&1 \
        | prefix_stdout "${_Blue}BUILDENV${_creset}  "
    SEARX_DEBUG=1 pyenv.cmd python utils/build_env.py 2>&1
    return "${PIPESTATUS[0]}"
}

@@ -189,11 +188,9 @@ docker.build() {
        die 1 "there is no remote origin"
    fi

    # This is a git repository

    # "git describe" to get the Docker version (for example : v0.15.0-89-g0585788e)
    # awk to remove the "v" and the "g"
    SEARX_GIT_VERSION=$(git describe --match "v[0-9]*\.[0-9]*\.[0-9]*" HEAD 2>/dev/null | awk -F'-' '{OFS="-"; $1=substr($1, 2); if ($3) { $3=substr($3, 2); } print}')
    SEARX_GIT_VERSION=$(git describe --tags | awk -F'-' '{OFS="-"; $1=substr($1, 2); if ($3) { $3=substr($3, 2); } print}')
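    # e.g. "v0.15.0-89-g0585788e" becomes "0.15.0-89-0585788e"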
    # add the suffix "-dirty" if the repository has uncommitted changes
    # /!\ HACK for searx/searx: ignore utils/brand.env
@@ -1,20 +1,19 @@
mock==4.0.3
nose2[coverage_plugin]==0.10.0
nose2[coverage_plugin]==0.12.0
cov-core==1.15.0
pycodestyle==2.8.0
pylint==2.12.2
splinter==0.17.0
pycodestyle==2.9.1
pylint==2.14.5
splinter==0.18.1
transifex-client==0.14.3; python_version < '3.10'
transifex-client==0.12.1; python_version == '3.10'
selenium==4.1.0
twine==3.7.1
transifex-client==0.12.5; python_version == '3.10'
selenium==4.4.3
twine==4.0.1
Pallets-Sphinx-Themes==2.0.2
docutils==0.16
Sphinx==4.4.0
docutils==0.18
Sphinx==5.1.1
sphinx-issues==3.0.1
sphinx-jinja==1.4.0
sphinx-tabs==3.2.0
sphinx-jinja==2.0.2
sphinx-tabs==3.4.1
sphinxcontrib-programoutput==0.17
sphinx-autobuild==2021.3.14
linuxdoc==20211220
aiounittest==1.4.1
@@ -1,16 +1,13 @@
certifi==2022.5.18.1
babel==2.9.1
Brotli==1.0.9
babel==2.10.3
certifi==2022.6.15
flask-babel==2.0.0
flask==2.1.1
flask==2.2.2
jinja2==3.1.2
lxml==4.9.0
pygments==2.8.0
langdetect==1.0.9
lxml==4.9.1
pygments==2.12.0
python-dateutil==2.8.2
pyyaml==6.0
httpx[http2]==0.23.0
Brotli==1.0.9
uvloop==0.16.0; python_version >= '3.7'
uvloop==0.14.0; python_version < '3.7'
httpx-socks[asyncio]==0.7.4
langdetect==1.0.9
setproctitle==1.2.2
requests[socks]==2.28.1
setproctitle==1.3.2
@@ -20,12 +20,10 @@ from lxml import etree
from json import loads
from urllib.parse import urlencode

from httpx import HTTPError

from requests import RequestException

from searx import settings
from searx.data import ENGINES_LANGUAGES
from searx.network import get as http_get
from searx.poolrequests import get as http_get
from searx.exceptions import SearxEngineResponseException

@@ -154,5 +152,5 @@ def search_autocomplete(backend_name, query, lang):
    try:
        return backend(query, lang)
    except (HTTPError, SearxEngineResponseException):
    except (RequestException, SearxEngineResponseException):
        return []
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,10 +1,8 @@
{
    "versions": [
        "99.0.1",
        "99.0",
        "98.0.2",
        "98.0.1",
        "98.0"
        "102.0",
        "101.0.1",
        "101.0"
    ],
    "os": [
        "Windows NT 10.0; WOW64",

@@ -114,7 +114,6 @@
    "Q106645257": "MN m",
    "Q106645261": "kN m",
    "Q106645290": "dN m",
    "Q106647058": "u",
    "Q1067722": "Fg",
    "Q106777906": "μS/m",
    "Q106777917": "S/cm",

@@ -507,6 +506,7 @@
    "Q3013059": "ka",
    "Q304479": "tr",
    "Q305896": "DPI",
    "Q3095010": "γ",
    "Q31889818": "ppq",
    "Q3194304": "kb",
    "Q3207456": "mW",
@@ -27,7 +27,7 @@ from searx import settings
from searx import logger
from searx.data import ENGINES_LANGUAGES
from searx.exceptions import SearxEngineResponseException
from searx.network import get, initialize as initialize_network, set_context_network_name
from searx.poolrequests import get, get_proxy_cycles
from searx.utils import load_module, match_language, get_engine_from_settings, gen_useragent

@@ -89,6 +89,8 @@ def load_engine(engine_data):
            engine.categories = []
        else:
            engine.categories = list(map(str.strip, param_value.split(',')))
    elif param_name == 'proxies':
        engine.proxies = get_proxy_cycles(param_value)
    else:
        setattr(engine, param_name, param_value)

@@ -283,3 +285,24 @@ def load_engines(engine_list):
        if engine is not None:
            engines[engine.name] = engine
    return engines


def initialize_engines(engine_list):
    load_engines(engine_list)

    def engine_init(engine_name, init_fn):
        try:
            init_fn(get_engine_from_settings(engine_name))
        except SearxEngineResponseException as exc:
            logger.warn('%s engine: Fail to initialize // %s', engine_name, exc)
        except Exception:
            logger.exception('%s engine: Fail to initialize', engine_name)
        else:
            logger.debug('%s engine: Initialized', engine_name)

    for engine_name, engine in engines.items():
        if hasattr(engine, 'init'):
            init_fn = getattr(engine, 'init')
            if init_fn:
                logger.debug('%s engine: Starting background initialization', engine_name)
                threading.Thread(target=engine_init, args=(engine_name, init_fn)).start()
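                # each init runs in its own background thread, so a slow or
                # unreachable engine does not delay searx startup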
@@ -52,7 +52,8 @@
                                      offset=offset)

    params['url'] = base_url + search_path
    params['headers']['User-Agent'] = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/97.0.4692.71 Safari/537.36'
    params['headers']['User-Agent'] = ('Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 '
                                       '(KHTML, like Gecko) Chrome/97.0.4692.71 Safari/537.36')

    return params
@@ -17,7 +17,7 @@ about = {
    "results": 'HTML',
}

engine_type = 'online_dictionnary'
engine_type = 'online_dictionary'
categories = ['general']
url = 'https://dictzone.com/{from_lang}-{to_lang}-dictionary/{query}'
weight = 100

@@ -27,9 +27,7 @@ https_support = True


def request(query, params):
    params['url'] = url.format(from_lang=params['from_lang'][2],
                               to_lang=params['to_lang'][2],
                               query=params['query'])
    params['url'] = url.format(from_lang=params['from_lang'][2], to_lang=params['to_lang'][2], query=params['query'])

    return params

@@ -51,10 +49,12 @@ def response(resp):
        if t.strip():
            to_results.append(to_result.text_content())

    results.append({
        'url': urljoin(str(resp.url), '?%d' % k),
        'title': from_result.text_content(),
        'content': '; '.join(to_results)
    })
    results.append(
        {
            'url': urljoin(str(resp.url), '?%d' % k),
            'title': from_result.text_content(),
            'content': '; '.join(to_results),
        }
    )

    return results
@@ -1,18 +1,24 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""
DuckDuckGo (Web)
"""DuckDuckGo Lite
"""

from json import loads
from urllib.parse import urlencode
from searx.utils import match_language, HTMLTextExtractor
import re
from searx.network import get

from lxml.html import fromstring

from searx.utils import (
    dict_subset,
    eval_xpath,
    eval_xpath_getindex,
    extract_text,
    match_language,
)
from searx.poolrequests import get

# about
about = {
    "website": 'https://duckduckgo.com/',
    "website": 'https://lite.duckduckgo.com/lite',
    "wikidata_id": 'Q12805',
    "official_api_documentation": 'https://duckduckgo.com/api',
    "use_official_api": False,

@@ -21,13 +27,11 @@ about = {
}

# engine dependent config
categories = ['general']
categories = ['general', 'web']
paging = True
supported_languages_url = 'https://duckduckgo.com/util/u172.js'
number_of_results = 10
supported_languages_url = 'https://duckduckgo.com/util/u588.js'
time_range_support = True
safesearch = True
VQD_REGEX = r"vqd='(\d+-\d+-\d+)'"

language_aliases = {
    'ar-SA': 'ar-XA',
    'es-419': 'es-XL',

@@ -35,16 +39,14 @@ language_aliases = {
    'ko': 'kr-KR',
    'sl-SI': 'sl-SL',
    'zh-TW': 'tzh-TW',
    'zh-HK': 'tzh-HK'
    'zh-HK': 'tzh-HK',
}

time_range_dict = {'day': 'd', 'week': 'w', 'month': 'm', 'year': 'y'}

# search-url
url = 'https://links.duckduckgo.com/d.js?'
url_ping = 'https://duckduckgo.com/t/sl_h'
time_range_dict = {'day': 'd',
                   'week': 'w',
                   'month': 'm',
                   'year': 'y'}
url = 'https://lite.duckduckgo.com/lite'
url_ping = 'https://duckduckgo.com/t/sl_l'


# match query's language to a region code that duckduckgo will accept

@@ -59,103 +61,111 @@ def get_region_code(lang, lang_list=None):
        return lang_parts[1].lower() + '-' + lang_parts[0].lower()


def get_vqd(query, headers):
    resp = get(f"https://duckduckgo.com/?q={query}&ia=web", headers=headers)
    resp = re.findall(VQD_REGEX, resp.text)
    return resp[0]


def request(query, params):

    params['method'] = 'GET'
    params['url'] = url
    params['method'] = 'POST'

    vqd = get_vqd(query, params['headers'])
    dl, ct = match_language(params['language'], supported_languages, language_aliases, 'wt-WT').split('-')
    query_dict = {
        'q': query,
        't': 'D',
        'l': params['language'],
        'kl': f'{ct}-{dl}',
        's': (params['pageno'] - 1) * number_of_results,
        'dl': dl,
        'ct': ct,
        'ss_mkt': get_region_code(params['language'], supported_languages),
        'df': params['time_range'],
        'vqd': vqd,
        'ex': -2,
        'sp': '1',
        'bpa': '1',
        'biaexp': 'b',
        'msvrtexp': 'b'
    }
    if params['safesearch'] == 2:  # STRICT
        del query_dict['t']
        query_dict['p'] = 1
        query_dict.update({
            'videxp': 'a',
            'nadse': 'b',
            'eclsexp': 'a',
            'stiaexp': 'a',
            'tjsexp': 'b',
            'related': 'b',
            'msnexp': 'a'
        })
    elif params['safesearch'] == 1:  # MODERATE
        query_dict['ex'] = -1
        query_dict.update({
            'nadse': 'b',
            'eclsexp': 'b',
            'tjsexp': 'b'
        })
    else:  # OFF
        query_dict['ex'] = -2
        query_dict.update({
            'nadse': 'b',
            'eclsexp': 'b',
            'tjsexp': 'b'
        })
    params['data']['q'] = query

    params['allow_redirects'] = False
    params['data'] = query_dict
    params['cookies']['kl'] = params['data']['kl']
    # The API is not documented, so we do some reverse engineering and emulate
    # what https://lite.duckduckgo.com/lite/ does when you press "next Page"
    # link again and again ..

    params['headers']['Content-Type'] = 'application/x-www-form-urlencoded'

    # initial page does not have an offset
    if params['pageno'] == 2:
        # second page does have an offset of 30
        offset = (params['pageno'] - 1) * 30
        params['data']['s'] = offset
        params['data']['dc'] = offset + 1

    elif params['pageno'] > 2:
        # third and following pages do have an offset of 30 + n*50
        offset = 30 + (params['pageno'] - 2) * 50
        params['data']['s'] = offset
        params['data']['dc'] = offset + 1
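        # offsets therefore run 0, 30, 80, 130, ...: 30 for page two, then +50 per page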
    # initial page does not have additional data in the input form
    if params['pageno'] > 1:
        # request the second page (and more pages) needs 'o' and 'api' arguments
        params['data']['o'] = 'json'
        params['data']['api'] = 'd.js'

    # initial page does not have additional data in the input form
    if params['pageno'] > 2:
        # request the third page (and more pages) needs some more arguments
        params['data']['nextParams'] = ''
        params['data']['v'] = ''
        params['data']['vqd'] = ''

    region_code = get_region_code(params['language'], supported_languages)
    if region_code:
        params['data']['kl'] = region_code
        params['cookies']['kl'] = region_code

    params['data']['df'] = ''
    if params['time_range'] in time_range_dict:
        params['data']['df'] = time_range_dict[params['time_range']]
        params['cookies']['df'] = time_range_dict[params['time_range']]
    params['url'] = url + urlencode(params['data'])

    return params


# get response from search-request
def response(resp):

    headers_ping = dict_subset(resp.request.headers, ['User-Agent', 'Accept-Encoding', 'Accept', 'Cookie'])
    get(url_ping, headers=headers_ping)

    if resp.status_code == 303:
        return []

    # parse the response
    results = []
    doc = fromstring(resp.text)

    data = re.findall(r"DDG\.pageLayout\.load\('d',(\[.+\])\);DDG\.duckbar\.load\('images'", str(resp.text))
    try:
        search_data = loads(data[0].replace('/\t/g', ' '))
    except IndexError:
        return
    result_table = eval_xpath(doc, '//html/body/form/div[@class="filters"]/table')
    if not len(result_table) >= 3:
        # no more results
        return []
    result_table = result_table[2]

    if len(search_data) == 1 and ('n' not in search_data[0]):
        only_result = search_data[0]
        if ((only_result.get('da') is not None and only_result.get('t') == 'EOF') or
                only_result.get('a') is not None or only_result.get('d') == 'google.com search'):
            return
    tr_rows = eval_xpath(result_table, './/tr')

    for search_result in search_data:
        if 'n' in search_result:
    # In the last <tr> is the form of the 'previous/next page' links
    tr_rows = tr_rows[:-1]

    len_tr_rows = len(tr_rows)
    offset = 0

    while len_tr_rows >= offset + 4:

        # assemble table rows we need to scrape
        tr_title = tr_rows[offset]
        tr_content = tr_rows[offset + 1]
        offset += 4

        # ignore sponsored ads <tr class="result-sponsored">
        if tr_content.get('class') == 'result-sponsored':
            continue
        title = HTMLTextExtractor()
        title.feed(search_result.get('t'))
        content = HTMLTextExtractor()
        content.feed(search_result.get('a'))

        results.append({'title': title.get_text(),
                        'content': content.get_text(),
                        'url': search_result.get('u')})
        a_tag = eval_xpath_getindex(tr_title, './/td//a[@class="result-link"]', 0, None)
        if a_tag is None:
            continue

        td_content = eval_xpath_getindex(tr_content, './/td[@class="result-snippet"]', 0, None)
        if td_content is None:
            continue

        results.append(
            {
                'title': a_tag.text_content(),
                'content': extract_text(td_content),
                'url': a_tag.get('href'),
            }
        )

    return results


@@ -165,7 +175,7 @@ def _fetch_supported_languages(resp):
    # response is a js file with regions as an embedded object
    response_page = resp.text
    response_page = response_page[response_page.find('regions:{') + 8:]
    response_page = response_page[:response_page.find('}') + 1]
    response_page = response_page[: response_page.find('}') + 1]

    regions_json = loads(response_page)
    supported_languages = map((lambda x: x[3:] + '-' + x[:2].upper()), regions_json.keys())
@@ -8,7 +8,7 @@ from urllib.parse import urlencode
from searx.exceptions import SearxEngineAPIException
from searx.engines.duckduckgo import get_region_code
from searx.engines.duckduckgo import _fetch_supported_languages, supported_languages_url  # NOQA # pylint: disable=unused-import
from searx.network import get
from searx.poolrequests import get

# about
about = {
@@ -4,6 +4,7 @@
"""

from json import loads, dumps
from requests.auth import HTTPBasicAuth
from searx.exceptions import SearxEngineAPIException


@@ -31,7 +32,7 @@ def request(query, params):
        return params

    if username and password:
        params['auth'] = (username, password)
        params['auth'] = HTTPBasicAuth(username, password)

    params['url'] = search_url
    params['method'] = 'GET'
@@ -0,0 +1,68 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
"""
Emojipedia
"""

from urllib.parse import urlencode
from lxml import html

from searx import logger
from searx.utils import (
    eval_xpath_list,
    eval_xpath_getindex,
    extract_text,
)

logger = logger.getChild('Emojipedia engine')

about = {
    "website": 'https://emojipedia.org',
    "wikidata_id": None,
    "official_api_documentation": None,
    "use_official_api": False,
    "require_api_key": False,
    "results": 'HTML',
}

categories = []
paging = False
time_range_support = False

base_url = 'https://emojipedia.org'
search_url = base_url + '/search/?{query}'


def request(query, params):
    params['url'] = search_url.format(
        query=urlencode({'q': query}),
    )
    return params


def response(resp):
    results = []

    dom = html.fromstring(resp.text)

    for result in eval_xpath_list(dom, "//ol[@class='search-results']/li"):

        extracted_desc = extract_text(eval_xpath_getindex(result, './/p', 0))

        if 'No results found.' in extracted_desc:
            break

        link = eval_xpath_getindex(result, './/h2/a', 0)

        url = base_url + link.attrib.get('href')
        title = extract_text(link)
        content = extracted_desc

        res = {
            'url': url,
            'title': title,
            'content': content
        }

        results.append(res)

    return results
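An engine module like this one is activated through an entry in settings.yml; a minimal sketch (the shortcut value is an illustrative assumption, not taken from this commit):

   - name : emojipedia
     engine : emojipedia
     shortcut : emj   # assumed; any unused shortcut works
     disabled : True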
@@ -7,8 +7,8 @@
import re
from json import loads, JSONDecodeError
from urllib.parse import urlencode
from searx.network import get
from searx.exceptions import SearxEngineResponseException
from searx.poolrequests import get

# about
about = {
@@ -10,7 +10,8 @@ Definitions`_.

# pylint: disable=invalid-name, missing-function-docstring, too-many-branches

from urllib.parse import urlencode
from urllib.parse import urlencode, urlparse
from random import random
from lxml import html
from searx import logger
from searx.utils import match_language, extract_text, eval_xpath, eval_xpath_list, eval_xpath_getindex

@@ -108,8 +109,8 @@ filter_mapping = {
# specific xpath variables
# ------------------------

# google results are grouped into <div class="g ..." ../>
results_xpath = '//div[@id="search"]//div[contains(@class, "g ")]'
# google results are grouped into <div class="jtfYYd ..." ../>
results_xpath = '//div[contains(@class, "jtfYYd")]'
results_xpath_mobile_ui = '//div[contains(@class, "g ")]'

# google *sections* are no usual *results*, we ignore them

@@ -194,7 +195,8 @@ def get_lang_info(params, lang_list, custom_aliases, supported_any_language):
    return ret_val

def detect_google_sorry(resp):
    if resp.url.host == 'sorry.google.com' or resp.url.path.startswith('/sorry'):
    resp_url = urlparse(resp.url)
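    # with requests, resp.url is a plain string (httpx exposed .host and .path), hence urlparse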
    if resp_url.netloc == 'sorry.google.com' or resp_url.path.startswith('/sorry'):
        raise SearxEngineCaptchaException()


@@ -222,6 +224,7 @@ def request(query, params):
        'oe': "utf8",
        'start': offset,
        'filter': '0',
        'ucbcb': 1,
        **additional_parameters,
    })

@@ -234,6 +237,7 @@ def request(query, params):
    params['url'] = query_url

    logger.debug("HTTP header Accept-Language --> %s", lang_info.get('Accept-Language'))
    params['cookies']['CONSENT'] = "PENDING+" + str(random()*100)
    params['headers'].update(lang_info['headers'])
    if use_mobile_ui:
        params['headers']['Accept'] = '*/*'

@@ -109,6 +109,7 @@ def request(query, params):
        **lang_info['params'],
        'ie': "utf8",
        'oe': "utf8",
        'ucbcd': 1,
        'num': 30,
    })

@@ -121,6 +122,7 @@ def request(query, params):
    params['url'] = query_url

    logger.debug("HTTP header Accept-Language --> %s", lang_info.get('Accept-Language'))
    params['cookies']['CONSENT'] = "YES+"
    params['headers'].update(lang_info['headers'])
    params['headers']['Accept'] = (
        'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'

@@ -21,6 +21,7 @@ import binascii
import re
from urllib.parse import urlencode
from base64 import b64decode
from random import random
from lxml import html

from searx import logger

@@ -104,6 +105,7 @@ def request(query, params):
        **lang_info['params'],
        'ie': "utf8",
        'oe': "utf8",
        'ucbcb': 1,
        'gl': lang_info['country'],
    }) + ('&ceid=%s' % ceid)  # ceid includes a ':' character which must not be urlencoded

@@ -111,10 +113,12 @@ def request(query, params):
    params['url'] = query_url

    logger.debug("HTTP header Accept-Language --> %s", lang_info.get('Accept-Language'))

    params['cookies']['CONSENT'] = "PENDING+" + str(random()*100)
    params['headers'].update(lang_info['headers'])
    params['headers']['Accept'] = (
        'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
    )

    return params
@@ -0,0 +1,69 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
"""
Google Play Apps
"""

from urllib.parse import urlencode
from lxml import html
from searx.utils import (
    eval_xpath,
    extract_url,
    extract_text,
    eval_xpath_list,
    eval_xpath_getindex,
)

about = {
    "website": "https://play.google.com/",
    "wikidata_id": "Q79576",
    "use_official_api": False,
    "require_api_key": False,
    "results": "HTML",
}

categories = ["files", "apps"]
search_url = "https://play.google.com/store/search?{query}&c=apps&ucbcb=1"


def request(query, params):
    params["url"] = search_url.format(query=urlencode({"q": query}))
    params['cookies']['CONSENT'] = "YES+"

    return params


def response(resp):
    results = []

    dom = html.fromstring(resp.text)

    if eval_xpath(dom, '//div[@class="v6DsQb"]'):
        return []

    spot = eval_xpath_getindex(dom, '//div[@class="ipRz4"]', 0, None)
    if spot is not None:
        url = extract_url(eval_xpath(spot, './a[@class="Qfxief"]/@href'), search_url)
        title = extract_text(eval_xpath(spot, './/div[@class="vWM94c"]'))
        content = extract_text(eval_xpath(spot, './/div[@class="LbQbAe"]'))
        img = extract_text(eval_xpath(spot, './/img[@class="T75of bzqKMd"]/@src'))

        results.append({"url": url, "title": title, "content": content, "img_src": img})

    more = eval_xpath_list(dom, '//c-wiz[@jsrenderer="RBsfwb"]//div[@role="listitem"]', min_len=1)
    for result in more:
        url = extract_url(eval_xpath(result, ".//a/@href"), search_url)
        title = extract_text(eval_xpath(result, './/span[@class="DdYX5"]'))
        content = extract_text(eval_xpath(result, './/span[@class="wMUdtb"]'))
        img = extract_text(
            eval_xpath(
                result,
                './/img[@class="T75of stzEZd" or @class="T75of etjhNc Q8CSx "]/@src',
            )
        )

        results.append({"url": url, "title": title, "content": content, "img_src": img})

    for suggestion in eval_xpath_list(dom, '//c-wiz[@jsrenderer="qyd4Kb"]//div[@class="ULeU3b neq64b"]'):
        results.append({"suggestion": extract_text(eval_xpath(suggestion, './/div[@class="Epkrse "]'))})

    return results

@@ -12,6 +12,7 @@ Definitions`_.

from urllib.parse import urlencode
from datetime import datetime
from random import random
from lxml import html
from searx import logger

@@ -85,13 +86,13 @@ def request(query, params):
    # subdomain is: scholar.google.xy
    lang_info['subdomain'] = lang_info['subdomain'].replace("www.", "scholar.")

    query_url = 'https://'+ lang_info['subdomain'] + '/scholar' + "?" + urlencode({
        'q': query,
        **lang_info['params'],
        'ie': "utf8",
        'oe': "utf8",
        'start' : offset,
    })
    query_url = (
        'https://'
        + lang_info['subdomain']
        + '/scholar'
        + "?"
        + urlencode({'q': query, **lang_info['params'], 'ie': "utf8", 'oe': "utf8", 'start': offset, 'ucbcb': 1})
    )

    query_url += time_range_url(params)

@@ -99,6 +100,7 @@ def request(query, params):
    params['url'] = query_url

    logger.debug("HTTP header Accept-Language --> %s", lang_info.get('Accept-Language'))
    params['cookies']['CONSENT'] = "PENDING+" + str(random()*100)
    params['headers'].update(lang_info['headers'])
    params['headers']['Accept'] = (
        'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'

@@ -22,6 +22,7 @@ Definitions`_. Not all parameters can be applied.

import re
from urllib.parse import urlencode
from random import random
from lxml import html

from searx import logger

@@ -125,6 +126,7 @@ def request(query, params):
        'q': query,
        'tbm': "vid",
        **lang_info['params'],
        'ucbcb': 1,
        'ie': "utf8",
        'oe': "utf8",
    })

@@ -138,6 +140,7 @@ def request(query, params):
    params['url'] = query_url

    logger.debug("HTTP header Accept-Language --> %s", lang_info.get('Accept-Language'))
    params['cookies']['CONSENT'] = "PENDING+" + str(random()*100)
    params['headers'].update(lang_info['headers'])
    params['headers']['Accept'] = (
        'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8'
@@ -0,0 +1,68 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""Lingva (alternative Google Translate frontend)"""

from json import loads

about = {
    "website": 'https://lingva.ml',
    "wikidata_id": None,
    "official_api_documentation": 'https://github.com/thedaviddelta/lingva-translate#public-apis',
    "use_official_api": True,
    "require_api_key": False,
    "results": 'JSON',
}

engine_type = 'online_dictionary'
categories = ['general']

url = "https://lingva.ml"
search_url = "{url}/api/v1/{from_lang}/{to_lang}/{query}"


def request(_query, params):
    params['url'] = search_url.format(
        url=url, from_lang=params['from_lang'][1], to_lang=params['to_lang'][1], query=params['query']
    )
    return params


def response(resp):
    results = []

    result = loads(resp.text)
    info = result["info"]
    from_to_prefix = "%s-%s " % (resp.search_params['from_lang'][1], resp.search_params['to_lang'][1])

    if "typo" in info:
        results.append({"suggestion": from_to_prefix + info["typo"]})

    if 'definitions' in info:  # pylint: disable=too-many-nested-blocks
        for definition in info['definitions']:
            if 'list' in definition:
                for item in definition['list']:
                    if 'synonyms' in item:
                        for synonym in item['synonyms']:
                            results.append({"suggestion": from_to_prefix + synonym})

    infobox = ""

    for translation in info["extraTranslations"]:
        infobox += f"<b>{translation['type']}</b>"

        for word in translation["list"]:
            infobox += f"<dl><dt>{word['word']}</dt>"

            for meaning in word["meanings"]:
                infobox += f"<dd>{meaning}</dd>"

            infobox += "</dl>"

    results.append(
        {
            'infobox': result["translation"],
            'content': infobox,
        }
    )

    return results
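Being an ``online_dictionary`` engine, lingva would also be wired up through settings.yml; a minimal sketch (the shortcut is an illustrative assumption, not taken from this commit):

   - name : lingva
     engine : lingva
     shortcut : lv              # assumed shortcut
     # url : https://lingva.ml  # default defined in the engine module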
|
|
@ -54,7 +54,7 @@ def search(query, params):
|
|||
.limit(results_per_page)
|
||||
ret.append({'number_of_results': results.count()})
|
||||
for r in results:
|
||||
del(r['_id'])
|
||||
del r['_id']
|
||||
r = {str(k): str(v) for k, v in r.items()}
|
||||
r['template'] = result_template
|
||||
ret.append(r)
|
||||
|
|
|
@ -30,6 +30,7 @@ about = {
|
|||
# engine dependent config
|
||||
categories = ['map']
|
||||
paging = False
|
||||
language_support = True
|
||||
|
||||
# search-url
|
||||
base_url = 'https://nominatim.openstreetmap.org/'
|
||||
|
@ -141,6 +142,9 @@ def request(query, params):
|
|||
params['url'] = base_url + search_string.format(query=urlencode({'q': query}))
|
||||
params['route'] = route_re.match(query)
|
||||
params['headers']['User-Agent'] = searx_useragent()
|
||||
|
||||
accept_language = 'en' if params['language'] == 'all' else params['language']
|
||||
params['headers']['Accept-Language'] = accept_language
|
||||
return params
|
||||
|
||||
|
||||
|
@ -200,7 +204,7 @@ def get_wikipedia_image(raw_value):
|
|||
return get_external_url('wikimedia_image', raw_value)
|
||||
|
||||
|
||||
def fetch_wikidata(nominatim_json, user_langage):
|
||||
def fetch_wikidata(nominatim_json, user_language):
|
||||
"""Update nominatim_json using the result of an unique to wikidata
|
||||
|
||||
For result in nominatim_json:
|
||||
|
@ -221,9 +225,10 @@ def fetch_wikidata(nominatim_json, user_langage):
|
|||
wd_to_results.setdefault(wd_id, []).append(result)
|
||||
|
||||
if wikidata_ids:
|
||||
user_language = 'en' if user_language == 'all' else user_language.split('-')[0]
|
||||
wikidata_ids_str = " ".join(wikidata_ids)
|
||||
query = wikidata_image_sparql.replace('%WIKIDATA_IDS%', sparql_string_escape(wikidata_ids_str)).replace(
|
||||
'%LANGUAGE%', sparql_string_escape(user_langage)
|
||||
'%LANGUAGE%', sparql_string_escape(user_language)
|
||||
)
|
||||
wikidata_json = send_wikidata_query(query)
|
||||
for wd_result in wikidata_json.get('results', {}).get('bindings', {}):
|
||||
|
@ -238,7 +243,7 @@ def fetch_wikidata(nominatim_json, user_langage):
|
|||
# overwrite wikipedia link
|
||||
wikipedia_name = wd_result.get('wikipediaName', {}).get('value')
|
||||
if wikipedia_name:
|
||||
result['extratags']['wikipedia'] = user_langage + ':' + wikipedia_name
|
||||
result['extratags']['wikipedia'] = user_language + ':' + wikipedia_name
|
||||
# get website if not already defined
|
||||
website = wd_result.get('website', {}).get('value')
|
||||
if (
|
||||
|
|
|
@ -7,7 +7,7 @@ from flask_babel import gettext
|
|||
from lxml import etree
|
||||
from datetime import datetime
|
||||
from urllib.parse import urlencode
|
||||
from searx.network import get
|
||||
from searx.poolrequests import get
|
||||
|
||||
# about
|
||||
about = {
|
||||
|
|
|
@ -33,7 +33,7 @@ from flask_babel import gettext
|
|||
|
||||
from searx.utils import match_language
|
||||
from searx.exceptions import SearxEngineAPIException
|
||||
from searx.network import raise_for_httperror
|
||||
from searx.raise_for_httperror import raise_for_httperror
|
||||
|
||||
|
||||
# about
|
||||
|
@@ -86,15 +86,14 @@ def request(query, params):

    # add language tag
    if params['language'] == 'all':
        params['url'] += '&locale=en_us'
        params['url'] += '&locale=en_US'
    else:
        language = match_language(
            params['language'],
            # pylint: disable=undefined-variable
            supported_languages,
            language_aliases,
        )
        params['url'] += '&locale=' + language.replace('-', '_').lower()
        params['url'] += '&locale=' + language.replace('-', '_')

    params['raise_for_httperror'] = False
    return params

@@ -113,7 +112,14 @@ def response(resp):

    # check for an API error
    if search_results.get('status') != 'success':
        msg = ",".join(data.get('message', ['unknown', ]))
        msg = ",".join(
            data.get(
                'message',
                [
                    'unknown',
                ],
            )
        )
        raise SearxEngineAPIException('API error::' + msg)

    # raise for other errors

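A quick sketch of the message join above, with an assumed error payload (the real Qwant response shape is not shown in this hunk):

    data = {'message': ['service down', 'retry later']}   # hypothetical payload
    msg = ",".join(data.get('message', ['unknown']))
    # msg == 'service down,retry later'
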
@@ -155,11 +161,13 @@ def response(resp):

        if mainline_type == 'web':
            content = item['desc']
            results.append({
                'title': title,
                'url': res_url,
                'content': content,
            })
            results.append(
                {
                    'title': title,
                    'url': res_url,
                    'content': content,
                }
            )

        elif mainline_type == 'news':

@@ -170,23 +178,27 @@ def response(resp):
            img_src = None
            if news_media:
                img_src = news_media[0].get('pict', {}).get('url', None)
            results.append({
                'title': title,
                'url': res_url,
                'publishedDate': pub_date,
                'img_src': img_src,
            })
            results.append(
                {
                    'title': title,
                    'url': res_url,
                    'publishedDate': pub_date,
                    'img_src': img_src,
                }
            )

        elif mainline_type == 'images':
            thumbnail = item['thumbnail']
            img_src = item['media']
            results.append({
                'title': title,
                'url': res_url,
                'template': 'images.html',
                'thumbnail_src': thumbnail,
                'img_src': img_src,
            })
            results.append(
                {
                    'title': title,
                    'url': res_url,
                    'template': 'images.html',
                    'thumbnail_src': thumbnail,
                    'img_src': img_src,
                }
            )

        elif mainline_type == 'videos':
            # some videos do not have a description: while qwant-video

@@ -210,19 +222,18 @@ def response(resp):
            thumbnail = item['thumbnail']
            # from some locations (DE and others?) the s2 link does respond
            # with a 'Please wait ..' page but does not deliver the thumbnail
            thumbnail = thumbnail.replace(
                'https://s2.qwant.com',
                'https://s1.qwant.com', 1
            )
            thumbnail = thumbnail.replace('https://s2.qwant.com', 'https://s1.qwant.com', 1)
            results.append(
                {
                    'title': title,
                    'url': res_url,
                    'content': content,
                    'publishedDate': pub_date,
                    'thumbnail': thumbnail,
                    'template': 'videos.html',
                    'length': length,
                }
            )
            results.append({
                'title': title,
                'url': res_url,
                'content': content,
                'publishedDate': pub_date,
                'thumbnail': thumbnail,
                'template': 'videos.html',
                'length': length,
            })

    return results

@@ -232,7 +243,7 @@ def _fetch_supported_languages(resp):
    # list of regions is embedded in page as a js object
    response_text = resp.text
    response_text = response_text[response_text.find('INITIAL_PROPS'):]
    response_text = response_text[response_text.find('{'):response_text.find('</script>')]
    response_text = response_text[response_text.find('{'): response_text.find('</script>')]

    regions_json = loads(response_text)

@@ -3,9 +3,9 @@
 Seznam
"""

from urllib.parse import urlencode
from urllib.parse import urlencode, urlparse
from lxml import html
from searx.network import get
from searx.poolrequests import get
from searx.exceptions import SearxEngineAccessDeniedException
from searx.utils import (
    extract_text,

@@ -46,7 +46,8 @@ def request(query, params):


def response(resp):
    if resp.url.path.startswith('/verify'):
    resp_url = urlparse(resp.url)
    if resp_url.path.startswith('/verify'):
        raise SearxEngineAccessDeniedException()

    results = []

@@ -1,12 +1,14 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
"""Słownik Języka Polskiego (general)
# lint: pylint
"""Słownik Języka Polskiego

 Dictionary of the Polish language from PWN (sjp.pwn)
"""

from lxml.html import fromstring
from searx import logger
from searx.utils import extract_text
from searx.network import raise_for_httperror
from searx.raise_for_httperror import raise_for_httperror

logger = logger.getChild('sjp engine')

@@ -9,7 +9,7 @@ from lxml import html
from dateutil import parser
from urllib.parse import quote_plus, urlencode
from searx import logger
from searx.network import get as http_get
from searx.poolrequests import get as http_get

# about
about = {

@@ -5,10 +5,9 @@

from json import loads
from urllib.parse import urlencode
import requests
import base64

from searx.network import post as http_post

# about
about = {
    "website": 'https://www.spotify.com',

@@ -39,7 +38,7 @@ def request(query, params):

    params['url'] = search_url.format(query=urlencode({'q': query}), offset=offset)

    r = http_post(
    r = requests.post(
        'https://accounts.spotify.com/api/token',
        data={'grant_type': 'client_credentials'},
        headers={'Authorization': 'Basic ' + base64.b64encode(

@@ -0,0 +1,65 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
"""
 Stackoverflow (IT)
"""

from urllib.parse import urlencode, urljoin, urlparse
from lxml import html
from searx.utils import extract_text
from searx.exceptions import SearxEngineCaptchaException

# about
about = {
    "website": 'https://stackoverflow.com/',
    "wikidata_id": 'Q549037',
    "official_api_documentation": 'https://api.stackexchange.com/docs',
    "use_official_api": False,
    "require_api_key": False,
    "results": 'HTML',
}

# engine dependent config
categories = ['it']
paging = True

# search-url
url = 'https://stackoverflow.com/'
search_url = url + 'search?{query}&page={pageno}'

# specific xpath variables
results_xpath = '//div[contains(@class,"question-summary")]'
link_xpath = './/div[@class="result-link"]//a|.//div[@class="summary"]//h3//a'
content_xpath = './/div[@class="excerpt"]'


# do search-request
def request(query, params):
    params['url'] = search_url.format(query=urlencode({'q': query}), pageno=params['pageno'])

    return params


# get response from search-request
def response(resp):
    resp_url = urlparse(resp.url)
    if resp_url.path.startswith('/nocaptcha'):
        raise SearxEngineCaptchaException()

    results = []

    dom = html.fromstring(resp.text)

    # parse results
    for result in dom.xpath(results_xpath):
        link = result.xpath(link_xpath)[0]
        href = urljoin(url, link.attrib.get('href'))
        title = extract_text(link)
        content = extract_text(result.xpath(content_xpath))

        # append result
        results.append({'url': href,
                        'title': title,
                        'content': content})

    # return results
    return results

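A small usage sketch for the new engine's URL template (the query value is made up; only search_url and the urlencode pattern come from the file above):

    from urllib.parse import urlencode
    search_url = 'https://stackoverflow.com/search?{query}&page={pageno}'
    search_url.format(query=urlencode({'q': 'list comprehension'}), pageno=2)
    # -> 'https://stackoverflow.com/search?q=list+comprehension&page=2'
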
@@ -17,7 +17,7 @@ from babel import Locale
from babel.localedata import locale_identifiers

from searx import logger
from searx.network import get
from searx.poolrequests import get
from searx.utils import extract_text, eval_xpath, match_language
from searx.exceptions import (
    SearxEngineResponseException,

@@ -2,10 +2,12 @@
 Tineye - Reverse search images
"""

from json import loads
from urllib.parse import urlencode

from datetime import datetime
from flask_babel import gettext

from searx import logger

about = {
    "website": "https://tineye.com",

@@ -18,13 +20,29 @@ about = {

categories = ['images']
paging = True
safesearch = False

base_url = 'https://tineye.com'
search_string = '/result_json/?page={page}&{query}'

logger = logger.getChild('tineye')

FORMAT_NOT_SUPPORTED = gettext(
    "Could not read that image url. This may be due to an unsupported file"
    " format. TinEye only supports images that are JPEG, PNG, GIF, BMP, TIFF or WebP."
)
"""TinEye error message"""

NO_SIGNATURE_ERROR = gettext(
    "The image is too simple to find matches. TinEye requires a basic level of"
    " visual detail to successfully identify matches."
)
"""TinEye error message"""

DOWNLOAD_ERROR = gettext("The image could not be downloaded.")
"""TinEye error message"""


def request(query, params):
    params['url'] = base_url +\

@@ -40,47 +58,147 @@ def request(query, params):
        'TE': 'trailers',
    })

    query = urlencode({'url': query})

    # see https://github.com/TinEye/pytineye/blob/main/pytineye/api.py
    params['url'] = base_url + search_string.format(query=query, page=params['pageno'])

    return params


def parse_tineye_match(match_json):
    """Takes parsed JSON from the API server and turns it into a :py:obj:`dict`
    object.

    Attributes `(class Match) <https://github.com/TinEye/pytineye/blob/main/pytineye/api.py>`__

    - `image_url`, link to the result image.
    - `domain`, domain this result was found on.
    - `score`, a number (0 to 100) that indicates how closely the images match.
    - `width`, image width in pixels.
    - `height`, image height in pixels.
    - `size`, image area in pixels.
    - `format`, image format.
    - `filesize`, image size in bytes.
    - `overlay`, overlay URL.
    - `tags`, whether this match belongs to a collection or stock domain.

    - `backlinks`, a list of Backlink objects pointing to the original websites
      and image URLs.  List items are instances of :py:obj:`dict`, (`Backlink
      <https://github.com/TinEye/pytineye/blob/main/pytineye/api.py>`__):

      - `url`, the image URL to the image.
      - `backlink`, the original website URL.
      - `crawl_date`, the date the image was crawled.

    """

    # HINT: there exists an alternative backlink dict in the domains list / e.g.::
    #
    #     match_json['domains'][0]['backlinks']

    backlinks = []
    if "backlinks" in match_json:

        for backlink_json in match_json["backlinks"]:
            if not isinstance(backlink_json, dict):
                continue

            crawl_date = backlink_json.get("crawl_date")
            if crawl_date:
                crawl_date = datetime.fromisoformat(crawl_date[:-3])
            else:
                crawl_date = datetime.min

            backlinks.append({
                'url': backlink_json.get("url"),
                'backlink': backlink_json.get("backlink"),
                'crawl_date': crawl_date,
                'image_name': backlink_json.get("image_name")}
            )

    return {
        'image_url': match_json.get("image_url"),
        'domain': match_json.get("domain"),
        'score': match_json.get("score"),
        'width': match_json.get("width"),
        'height': match_json.get("height"),
        'size': match_json.get("size"),
        'image_format': match_json.get("format"),
        'filesize': match_json.get("filesize"),
        'overlay': match_json.get("overlay"),
        'tags': match_json.get("tags"),
        'backlinks': backlinks,
    }

def response(resp):
    """Parse HTTP response from TinEye."""
    results = []
    # Define wanted results
    json_data = loads(resp.text)
    number_of_results = json_data['num_matches']

    for i in json_data['matches']:
        image_format = i['format']
        width = i['width']
        height = i['height']
        thumbnail_src = i['image_url']
        backlink = i['domains'][0]['backlinks'][0]
    try:
        json_data = resp.json()
    except Exception as exc:  # pylint: disable=broad-except
        msg = "can't parse JSON response // %s" % exc
        logger.error(msg)
        json_data = {'error': msg}

        url = backlink['backlink']
        source = backlink['url']
        title = backlink['image_name']
        img_src = backlink['url']
    # handle error codes from Tineye

        # Get and convert published date
        api_date = backlink['crawl_date'][:-3]
        publishedDate = datetime.fromisoformat(api_date)
    if resp.is_error:
        if resp.status_code in (400, 422):

        # Append results
        results.append({
            message = 'HTTP status: %s' % resp.status_code
            error = json_data.get('error')
            s_key = json_data.get('suggestions', {}).get('key', '')

            if error and s_key:
                message = "%s (%s)" % (error, s_key)
            elif error:
                message = error

            if s_key == "Invalid image URL":
                # test https://docs.searxng.org/_static/searxng-wordmark.svg
                message = FORMAT_NOT_SUPPORTED
            elif s_key == 'NO_SIGNATURE_ERROR':
                # test https://pngimg.com/uploads/dot/dot_PNG4.png
                message = NO_SIGNATURE_ERROR
            elif s_key == 'Download Error':
                # test https://notexists
                message = DOWNLOAD_ERROR

            logger.error(message)

            return results

        resp.raise_for_status()

    # append results from matches
    for match_json in json_data['matches']:

        tineye_match = parse_tineye_match(match_json)
        if not tineye_match['backlinks']:
            continue

        backlink = tineye_match['backlinks'][0]
        results.append(
            {
                'template': 'images.html',
                'url': url,
                'thumbnail_src': thumbnail_src,
                'source': source,
                'title': title,
                'img_src': img_src,
                'format': image_format,
                'width': width,
                'height': height,
                'publishedDate': publishedDate,
            })
                'url': backlink['backlink'],
                'thumbnail_src': tineye_match['image_url'],
                'source': backlink['url'],
                'title': backlink['image_name'],
                'img_src': backlink['url'],
                'format': tineye_match['image_format'],
                'width': tineye_match['width'],
                'height': tineye_match['height'],
                'publishedDate': backlink['crawl_date'],
            }
        )

    # Append number of results
    results.append({'number_of_results': number_of_results})
    # append number of results
    number_of_results = json_data.get('num_matches')
    if number_of_results:
        results.append({'number_of_results': number_of_results})

    return results

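A hedged aside on the crawl_date handling in parse_tineye_match above: the API appears to return timestamps with nine fractional digits, which datetime.fromisoformat cannot parse on the Python versions searx supported here, hence the [:-3] trim down to microseconds. The sample timestamp below is made up:

    from datetime import datetime
    raw = '2020-05-25T14:25:49.123456789'      # assumed TinEye format
    crawl_date = datetime.fromisoformat(raw[:-3])
    # -> datetime.datetime(2020, 5, 25, 14, 25, 49, 123456)
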
@@ -13,7 +13,7 @@ about = {
    "results": 'JSON',
}

engine_type = 'online_dictionnary'
engine_type = 'online_dictionary'
categories = ['general']
url = 'https://api.mymemory.translated.net/get?q={query}&langpair={from_lang}|{to_lang}{key}'
web_url = 'https://mymemory.translated.net/en/{from_lang}/{to_lang}/{query}'

@@ -12,7 +12,7 @@ from babel.dates import format_datetime, format_date, format_time, get_datetime_

from searx import logger
from searx.data import WIKIDATA_UNITS
from searx.network import post, get
from searx.poolrequests import post, get
from searx.utils import match_language, searx_useragent, get_string_replaces_function
from searx.external_urls import get_external_url, get_earth_coordinates_url, area_to_osm_zoom
from searx.engines.wikipedia import _fetch_supported_languages, supported_languages_url  # NOQA # pylint: disable=unused-import

@@ -64,6 +64,7 @@ WHERE
        mwapi:language "%LANGUAGE%".
        ?item wikibase:apiOutputItem mwapi:item.
      }
      hint:Prior hint:runFirst "true".

      %WHERE%

@@ -92,6 +93,12 @@ WHERE {
}
"""

# see the property "dummy value" of https://www.wikidata.org/wiki/Q2013 (Wikidata)
# hard coded here to avoid an additional SPARQL request when the server starts
DUMMY_ENTITY_URLS = set(
    "http://www.wikidata.org/entity/" + wid for wid in ("Q4115189", "Q13406268", "Q15397819", "Q17339402")
)


# https://www.w3.org/TR/sparql11-query/#rSTRING_LITERAL1
# https://lists.w3.org/Archives/Public/public-rdf-dawg/2011OctDec/0175.html

@@ -173,7 +180,7 @@ def response(resp):
    for result in jsonresponse.get('results', {}).get('bindings', []):
        attribute_result = {key: value['value'] for key, value in result.items()}
        entity_url = attribute_result['item']
        if entity_url not in seen_entities:
        if entity_url not in seen_entities and entity_url not in DUMMY_ENTITY_URLS:
            seen_entities.add(entity_url)
            results += get_results(attribute_result, attributes, language)
        else:

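A minimal sketch of the new dummy-entity filter (the Q-id below is one of the hard-coded sandbox items; the surrounding binding shape is assumed):

    entity_url = 'http://www.wikidata.org/entity/Q4115189'
    entity_url in DUMMY_ENTITY_URLS   # True -> this binding is skipped
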
@@ -7,7 +7,7 @@ from urllib.parse import quote
from json import loads
from lxml.html import fromstring
from searx.utils import match_language, searx_useragent
from searx.network import raise_for_httperror
from searx.raise_for_httperror import raise_for_httperror

# about
about = {

@@ -7,7 +7,7 @@ from json import loads
from time import time
from urllib.parse import urlencode

from searx.network import get as http_get
from searx.poolrequests import get as http_get

# about
about = {

@@ -6,7 +6,7 @@
from lxml.html import fromstring
from searx import logger
from searx.utils import extract_text
from searx.network import raise_for_httperror
from searx.raise_for_httperror import raise_for_httperror

logger = logger.getChild('Wordnik engine')

@@ -7,7 +7,7 @@ from json import loads
from dateutil import parser
from urllib.parse import urlencode

from httpx import DigestAuth
from requests.auth import HTTPDigestAuth

from searx.utils import html_to_text

@@ -56,7 +56,7 @@ def request(query, params):
        search_type=search_type)

    if http_digest_auth_user and http_digest_auth_pass:
        params['auth'] = DigestAuth(http_digest_auth_user, http_digest_auth_pass)
        params['auth'] = HTTPDigestAuth(http_digest_auth_user, http_digest_auth_pass)

    # add language tag if specified
    if params['language'] != 'all':

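For context, a hedged sketch of the requests-side digest auth that replaces httpx's DigestAuth here (the URL and credentials are placeholders, not from the diff):

    import requests
    from requests.auth import HTTPDigestAuth
    # one-off request with digest credentials
    requests.get('https://example.org/protected', auth=HTTPDigestAuth('user', 'pass'))
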
@@ -8,7 +8,7 @@ from operator import itemgetter
from datetime import datetime
from urllib.parse import quote
from searx.utils import extract_text, get_torrent_size
from searx.network import get as http_get
from searx.poolrequests import get as http_get

# about
about = {

@@ -39,7 +39,7 @@ cookies = dict()
def init(engine_settings=None):
    global cookies  # pylint: disable=global-variable-not-assigned
    # initial cookies
    resp = http_get(url, follow_redirects=False)
    resp = http_get(url)
    if resp.ok:
        for r in resp.history:
            cookies.update(r.cookies)

@@ -3,10 +3,10 @@
 Youtube (Videos)
"""

from datetime import datetime
from functools import reduce
from json import loads, dumps
from urllib.parse import quote_plus
from random import random

# about
about = {

@@ -26,7 +26,7 @@ time_range_support = True

# search-url
base_url = 'https://www.youtube.com/results'
search_url = base_url + '?search_query={query}&page={page}'
search_url = base_url + '?search_query={query}&page={page}&ucbcb=1'
time_range_url = '&sp=EgII{time_range}%253D%253D'
# the key seems to be constant
next_page_url = 'https://www.youtube.com/youtubei/v1/search?key=AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8'

@@ -44,6 +44,7 @@ base_youtube_url = 'https://www.youtube.com/watch?v='

# do search-request
def request(query, params):
    params['cookies']['CONSENT'] = "PENDING+" + str(random() * 100)
    if not params['engine_data'].get('next_page_token'):
        params['url'] = search_url.format(query=quote_plus(query), page=params['pageno'])
        if params['time_range'] in time_range_dict:

@@ -57,7 +58,6 @@ def request(query, params):
        })
        params['headers']['Content-Type'] = 'application/json'

    params['headers']['Cookie'] = "CONSENT=YES+cb.%s-17-p0.en+F+941;" % datetime.now().strftime("%Y%m%d")
    return params

@@ -3,7 +3,7 @@ import inspect
import logging
from json import JSONDecodeError
from urllib.parse import urlparse
from httpx import HTTPError, HTTPStatusError
from requests.exceptions import RequestException
from searx.exceptions import (SearxXPathSyntaxException, SearxEngineXPathException, SearxEngineAPIException,
                              SearxEngineAccessDeniedException)
from searx import logger

@@ -60,28 +60,28 @@ def get_trace(traces):
    return traces[-1]


def get_hostname(exc: HTTPError) -> typing.Optional[str]:
def get_hostname(exc: RequestException) -> typing.Optional[str]:
    url = exc.request.url
    if url is None and exc.response is not None:
        url = exc.response.url
    return urlparse(url).netloc


def get_request_exception_messages(exc: HTTPError)\
def get_request_exception_messages(exc: RequestException)\
        -> typing.Tuple[typing.Optional[str], typing.Optional[str], typing.Optional[str]]:
    url = None
    status_code = None
    reason = None
    hostname = None
    if hasattr(exc, 'request') and exc.request is not None:
    if exc.request is not None:
        url = exc.request.url
    if url is None and hasattr(exc, 'response') and exc.response is not None:
    if url is None and exc.response is not None:
        url = exc.response.url
    if url is not None:
        hostname = url.host
    if isinstance(exc, HTTPStatusError):
        hostname = str(urlparse(url).netloc)
    if exc.response is not None:
        status_code = str(exc.response.status_code)
        reason = exc.response.reason_phrase
        reason = exc.response.reason
    return (status_code, reason, hostname)


@@ -92,7 +92,7 @@ def get_messages(exc, filename) -> typing.Tuple:
        return (str(exc), )
    if isinstance(exc, ValueError) and 'lxml' in filename:
        return (str(exc), )
    if isinstance(exc, HTTPError):
    if isinstance(exc, RequestException):
        return get_request_exception_messages(exc)
    if isinstance(exc, SearxXPathSyntaxException):
        return (exc.xpath_str, exc.message)

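A quick sketch of the hostname extraction that replaces httpx's url.host above (the example URL is arbitrary):

    from urllib.parse import urlparse
    str(urlparse('https://example.org/search?q=x').netloc)   # -> 'example.org'
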
@@ -1,188 +0,0 @@
# SPDX-License-Identifier: AGPL-3.0-or-later

import asyncio
import threading
import concurrent.futures
from time import time
from queue import SimpleQueue
from types import MethodType

import httpx
import h2.exceptions

from .network import get_network, initialize, check_network_configuration
from .client import get_loop
from .raise_for_httperror import raise_for_httperror


THREADLOCAL = threading.local()


def reset_time_for_thread():
    THREADLOCAL.total_time = 0


def get_time_for_thread():
    return THREADLOCAL.total_time


def set_timeout_for_thread(timeout, start_time=None):
    THREADLOCAL.timeout = timeout
    THREADLOCAL.start_time = start_time


def set_context_network_name(network_name):
    THREADLOCAL.network = get_network(network_name)


def get_context_network():
    try:
        return THREADLOCAL.network
    except AttributeError:
        return get_network()


def request(method, url, **kwargs):
    """same as requests/requests/api.py request(...)"""
    time_before_request = time()

    # timeout (httpx)
    if 'timeout' in kwargs:
        timeout = kwargs['timeout']
    else:
        timeout = getattr(THREADLOCAL, 'timeout', None)
        if timeout is not None:
            kwargs['timeout'] = timeout

    # 2 minutes timeout for the requests without timeout
    timeout = timeout or 120

    # adjust actual timeout
    timeout += 0.2  # overhead
    start_time = getattr(THREADLOCAL, 'start_time', time_before_request)
    if start_time:
        timeout -= time() - start_time

    # raise_for_error
    check_for_httperror = True
    if 'raise_for_httperror' in kwargs:
        check_for_httperror = kwargs['raise_for_httperror']
        del kwargs['raise_for_httperror']

    # requests compatibility
    if isinstance(url, bytes):
        url = url.decode()

    # network
    network = get_context_network()

    # do request
    future = asyncio.run_coroutine_threadsafe(network.request(method, url, **kwargs), get_loop())
    try:
        response = future.result(timeout)
    except concurrent.futures.TimeoutError as e:
        raise httpx.TimeoutException('Timeout', request=None) from e

    # requests compatibility
    # see also https://www.python-httpx.org/compatibility/#checking-for-4xx5xx-responses
    response.ok = not response.is_error

    # update total_time.
    # See get_time_for_thread() and reset_time_for_thread()
    if hasattr(THREADLOCAL, 'total_time'):
        time_after_request = time()
        THREADLOCAL.total_time += time_after_request - time_before_request

    # raise an exception
    if check_for_httperror:
        raise_for_httperror(response)

    return response


def get(url, **kwargs):
    kwargs.setdefault('follow_redirects', True)
    return request('get', url, **kwargs)


def options(url, **kwargs):
    kwargs.setdefault('follow_redirects', True)
    return request('options', url, **kwargs)


def head(url, **kwargs):
    kwargs.setdefault('follow_redirects', False)
    return request('head', url, **kwargs)


def post(url, data=None, **kwargs):
    return request('post', url, data=data, **kwargs)


def put(url, data=None, **kwargs):
    return request('put', url, data=data, **kwargs)


def patch(url, data=None, **kwargs):
    return request('patch', url, data=data, **kwargs)


def delete(url, **kwargs):
    return request('delete', url, **kwargs)


async def stream_chunk_to_queue(network, q, method, url, **kwargs):
    try:
        async with await network.stream(method, url, **kwargs) as response:
            q.put(response)
            # aiter_raw: access the raw bytes on the response without applying any HTTP content decoding
            # https://www.python-httpx.org/quickstart/#streaming-responses
            async for chunk in response.aiter_bytes(65536):
                if len(chunk) > 0:
                    q.put(chunk)
    except httpx.ResponseClosed as e:
        # the response was closed
        pass
    except (httpx.HTTPError, OSError, h2.exceptions.ProtocolError) as e:
        q.put(e)
    finally:
        q.put(None)


def _close_response_method(self):
    asyncio.run_coroutine_threadsafe(
        self.aclose(),
        get_loop()
    )


def stream(method, url, **kwargs):
    """Replace httpx.stream.

    Usage:
    stream = poolrequests.stream(...)
    response = next(stream)
    for chunk in stream:
        ...

    httpx.Client.stream requires writing an httpx.HTTPTransport version of
    the httpx.AsyncHTTPTransport declared above.
    """
    q = SimpleQueue()
    future = asyncio.run_coroutine_threadsafe(stream_chunk_to_queue(get_network(), q, method, url, **kwargs),
                                              get_loop())
    # yield response
    response = q.get()
    if isinstance(response, Exception):
        raise response
    response.close = MethodType(_close_response_method, response)
    yield response

    # yield chunks
    chunk_or_exception = q.get()
    while chunk_or_exception is not None:
        if isinstance(chunk_or_exception, Exception):
            raise chunk_or_exception
        yield chunk_or_exception
        chunk_or_exception = q.get()
    future.result()

@@ -1,167 +0,0 @@
# SPDX-License-Identifier: AGPL-3.0-or-later

import asyncio
import logging
import threading
import uvloop

import httpx
from httpx_socks import AsyncProxyTransport
from python_socks import (
    parse_proxy_url,
    ProxyConnectionError,
    ProxyTimeoutError,
    ProxyError
)
import python_socks._errors

from searx import logger


logger = logger.getChild('searx.http.client')
LOOP = None
SSLCONTEXTS = {}
TRANSPORT_KWARGS = {
    'trust_env': False,
}


def get_sslcontexts(proxy_url=None, cert=None, verify=True, trust_env=True, http2=False):
    global SSLCONTEXTS
    key = (proxy_url, cert, verify, trust_env, http2)
    if key not in SSLCONTEXTS:
        SSLCONTEXTS[key] = httpx.create_ssl_context(cert, verify, trust_env, http2)
    return SSLCONTEXTS[key]


class AsyncHTTPTransportNoHttp(httpx.AsyncHTTPTransport):
    """Block HTTP request"""

    async def handle_async_request(self, request):
        raise httpx.UnsupportedProtocol('HTTP protocol is disabled')


class AsyncProxyTransportFixed(AsyncProxyTransport):
    """Fix httpx_socks.AsyncProxyTransport

    Map python_socks exceptions to httpx.ProxyError exceptions
    """

    async def handle_async_request(self, request):
        try:
            return await super().handle_async_request(request)
        except ProxyConnectionError as e:
            raise httpx.ProxyError("ProxyConnectionError: " + e.strerror, request=request) from e
        except ProxyTimeoutError as e:
            raise httpx.ProxyError("ProxyTimeoutError: " + e.args[0], request=request) from e
        except ProxyError as e:
            raise httpx.ProxyError("ProxyError: " + e.args[0], request=request) from e


def get_transport_for_socks_proxy(verify, http2, local_address, proxy_url, limit, retries):
    # support socks5h (requests compatibility):
    # https://requests.readthedocs.io/en/master/user/advanced/#socks
    # socks5://   hostname is resolved on client side
    # socks5h://  hostname is resolved on proxy side
    rdns = False
    socks5h = 'socks5h://'
    if proxy_url.startswith(socks5h):
        proxy_url = 'socks5://' + proxy_url[len(socks5h):]
        rdns = True

    proxy_type, proxy_host, proxy_port, proxy_username, proxy_password = parse_proxy_url(proxy_url)
    verify = get_sslcontexts(proxy_url, None, True, False, http2) if verify is True else verify
    return AsyncProxyTransportFixed(
        proxy_type=proxy_type,
        proxy_host=proxy_host,
        proxy_port=proxy_port,
        username=proxy_username,
        password=proxy_password,
        rdns=rdns,
        loop=get_loop(),
        verify=verify,
        http2=http2,
        local_address=local_address,
        limits=limit,
        retries=retries,
        **TRANSPORT_KWARGS,
    )


def get_transport(verify, http2, local_address, proxy_url, limit, retries):
    verify = get_sslcontexts(None, None, True, False, http2) if verify is True else verify
    return httpx.AsyncHTTPTransport(
        # pylint: disable=protected-access
        verify=verify,
        http2=http2,
        limits=limit,
        proxy=httpx._config.Proxy(proxy_url) if proxy_url else None,
        local_address=local_address,
        retries=retries,
        **TRANSPORT_KWARGS,
    )


def iter_proxies(proxies):
    # https://www.python-httpx.org/compatibility/#proxy-keys
    if isinstance(proxies, str):
        yield 'all://', proxies
    elif isinstance(proxies, dict):
        for pattern, proxy_url in proxies.items():
            yield pattern, proxy_url


def new_client(enable_http, verify, enable_http2,
               max_connections, max_keepalive_connections, keepalive_expiry,
               proxies, local_address, retries, max_redirects, hook_log_response):
    limit = httpx.Limits(max_connections=max_connections,
                         max_keepalive_connections=max_keepalive_connections,
                         keepalive_expiry=keepalive_expiry)
    # See https://www.python-httpx.org/advanced/#routing
    mounts = {}
    for pattern, proxy_url in iter_proxies(proxies):
        if not enable_http and (pattern == 'http' or pattern.startswith('http://')):
            continue
        if proxy_url.startswith('socks4://') \
                or proxy_url.startswith('socks5://') \
                or proxy_url.startswith('socks5h://'):
            mounts[pattern] = get_transport_for_socks_proxy(verify, enable_http2, local_address, proxy_url, limit,
                                                            retries)
        else:
            mounts[pattern] = get_transport(verify, enable_http2, local_address, proxy_url, limit, retries)

    if not enable_http:
        mounts['http://'] = AsyncHTTPTransportNoHttp()

    transport = get_transport(verify, enable_http2, local_address, None, limit, retries)
    event_hooks = None
    if hook_log_response:
        event_hooks = {'response': [hook_log_response]}
    return httpx.AsyncClient(transport=transport, mounts=mounts, max_redirects=max_redirects, event_hooks=event_hooks)


def get_loop():
    global LOOP
    return LOOP


def init():
    # log
    for logger_name in ('hpack.hpack', 'hpack.table'):
        logging.getLogger(logger_name).setLevel(logging.WARNING)

    # loop
    def loop_thread():
        global LOOP
        LOOP = asyncio.new_event_loop()
        LOOP.run_forever()

    th = threading.Thread(
        target=loop_thread,
        name='asyncio_loop',
        daemon=True,
    )
    th.start()


init()

@@ -1,402 +0,0 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pylint: disable=global-statement
# pylint: disable=missing-module-docstring, missing-class-docstring

import atexit
import asyncio
import ipaddress
from itertools import cycle
from typing import Dict

import httpx

from searx import logger, searx_debug
from .client import new_client, get_loop, AsyncHTTPTransportNoHttp


logger = logger.getChild('network')
DEFAULT_NAME = '__DEFAULT__'
NETWORKS: Dict[str, 'Network'] = {}
# requests compatibility when reading proxy settings from settings.yml
PROXY_PATTERN_MAPPING = {
    'http': 'http://',
    'https': 'https://',
    'socks4': 'socks4://',
    'socks5': 'socks5://',
    'socks5h': 'socks5h://',
    'http:': 'http://',
    'https:': 'https://',
    'socks4:': 'socks4://',
    'socks5:': 'socks5://',
    'socks5h:': 'socks5h://',
}

ADDRESS_MAPPING = {'ipv4': '0.0.0.0', 'ipv6': '::'}


class Network:

    __slots__ = (
        'enable_http',
        'verify',
        'enable_http2',
        'max_connections',
        'max_keepalive_connections',
        'keepalive_expiry',
        'local_addresses',
        'proxies',
        'using_tor_proxy',
        'max_redirects',
        'retries',
        'retry_on_http_error',
        '_local_addresses_cycle',
        '_proxies_cycle',
        '_clients',
        '_logger',
    )

    _TOR_CHECK_RESULT = {}

    def __init__(
        # pylint: disable=too-many-arguments
        self,
        enable_http=True,
        verify=True,
        enable_http2=False,
        max_connections=None,
        max_keepalive_connections=None,
        keepalive_expiry=None,
        proxies=None,
        using_tor_proxy=False,
        local_addresses=None,
        retries=0,
        retry_on_http_error=None,
        max_redirects=30,
        logger_name=None,
    ):

        self.enable_http = enable_http
        self.verify = verify
        self.enable_http2 = enable_http2
        self.max_connections = max_connections
        self.max_keepalive_connections = max_keepalive_connections
        self.keepalive_expiry = keepalive_expiry
        self.proxies = proxies
        self.using_tor_proxy = using_tor_proxy
        self.local_addresses = local_addresses
        self.retries = retries
        self.retry_on_http_error = retry_on_http_error
        self.max_redirects = max_redirects
        self._local_addresses_cycle = self.get_ipaddress_cycle()
        self._proxies_cycle = self.get_proxy_cycles()
        self._clients = {}
        self._logger = logger.getChild(logger_name) if logger_name else logger
        self.check_parameters()

    def check_parameters(self):
        for address in self.iter_ipaddresses():
            if '/' in address:
                ipaddress.ip_network(address, False)
            else:
                ipaddress.ip_address(address)

        if self.proxies is not None and not isinstance(self.proxies, (str, dict)):
            raise ValueError('proxies type has to be str, dict or None')

    def iter_ipaddresses(self):
        local_addresses = self.local_addresses
        if not local_addresses:
            return
        if isinstance(local_addresses, str):
            local_addresses = [local_addresses]
        for address in local_addresses:
            yield address

    def get_ipaddress_cycle(self):
        while True:
            count = 0
            for address in self.iter_ipaddresses():
                if '/' in address:
                    for a in ipaddress.ip_network(address, False).hosts():
                        yield str(a)
                        count += 1
                else:
                    a = ipaddress.ip_address(address)
                    yield str(a)
                    count += 1
            if count == 0:
                yield None

    def iter_proxies(self):
        if not self.proxies:
            return
        # https://www.python-httpx.org/compatibility/#proxy-keys
        if isinstance(self.proxies, str):
            yield 'all://', [self.proxies]
        else:
            for pattern, proxy_url in self.proxies.items():
                pattern = PROXY_PATTERN_MAPPING.get(pattern, pattern)
                if isinstance(proxy_url, str):
                    proxy_url = [proxy_url]
                yield pattern, proxy_url

    def get_proxy_cycles(self):
        proxy_settings = {}
        for pattern, proxy_urls in self.iter_proxies():
            proxy_settings[pattern] = cycle(proxy_urls)
        while True:
            # pylint: disable=stop-iteration-return
            yield tuple((pattern, next(proxy_url_cycle)) for pattern, proxy_url_cycle in proxy_settings.items())

    async def log_response(self, response: httpx.Response):
        request = response.request
        status = f"{response.status_code} {response.reason_phrase}"
        response_line = f"{response.http_version} {status}"
        content_type = response.headers.get("Content-Type")
        content_type = f' ({content_type})' if content_type else ''
        self._logger.debug(f'HTTP Request: {request.method} {request.url} "{response_line}"{content_type}')

    @staticmethod
    async def check_tor_proxy(client: httpx.AsyncClient, proxies) -> bool:
        if proxies in Network._TOR_CHECK_RESULT:
            return Network._TOR_CHECK_RESULT[proxies]

        result = True
        # ignore client._transport because it is not used with all://
        for transport in client._mounts.values():  # pylint: disable=protected-access
            if isinstance(transport, AsyncHTTPTransportNoHttp):
                continue
            if getattr(transport, '_pool') and getattr(transport._pool, '_rdns', False):
                continue
            return False
        response = await client.get("https://check.torproject.org/api/ip", timeout=10)
        if not response.json()["IsTor"]:
            result = False
        Network._TOR_CHECK_RESULT[proxies] = result
        return result

    async def get_client(self, verify=None, max_redirects=None):
        verify = self.verify if verify is None else verify
        max_redirects = self.max_redirects if max_redirects is None else max_redirects
        local_address = next(self._local_addresses_cycle)
        proxies = next(self._proxies_cycle)  # is a tuple so it can be part of the key
        key = (verify, max_redirects, local_address, proxies)
        hook_log_response = self.log_response if searx_debug else None
        if key not in self._clients or self._clients[key].is_closed:
            client = new_client(
                self.enable_http,
                verify,
                self.enable_http2,
                self.max_connections,
                self.max_keepalive_connections,
                self.keepalive_expiry,
                dict(proxies),
                local_address,
                0,
                max_redirects,
                hook_log_response,
            )
            if self.using_tor_proxy and not await self.check_tor_proxy(client, proxies):
                await client.aclose()
                raise httpx.ProxyError('Network configuration problem: not using Tor')
            self._clients[key] = client
        return self._clients[key]

    async def aclose(self):
        async def close_client(client):
            try:
                await client.aclose()
            except httpx.HTTPError:
                pass

        await asyncio.gather(*[close_client(client) for client in self._clients.values()], return_exceptions=False)

    @staticmethod
    def extract_kwargs_clients(kwargs):
        kwargs_clients = {}
        if 'verify' in kwargs:
            kwargs_clients['verify'] = kwargs.pop('verify')
        if 'max_redirects' in kwargs:
            kwargs_clients['max_redirects'] = kwargs.pop('max_redirects')
        if 'allow_redirects' in kwargs:
            # see https://github.com/encode/httpx/pull/1808
            kwargs['follow_redirects'] = kwargs.pop('allow_redirects')
        return kwargs_clients

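A hedged sketch of extract_kwargs_clients above (the kwargs values are invented): per-client options are split off, and requests' allow_redirects is renamed in place to httpx's follow_redirects:

    kwargs = {'verify': False, 'allow_redirects': True, 'timeout': 3.0}
    Network.extract_kwargs_clients(kwargs)
    # -> {'verify': False}
    # kwargs is now {'timeout': 3.0, 'follow_redirects': True}
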
    def is_valid_response(self, response):
        # pylint: disable=too-many-boolean-expressions
        if (
            (self.retry_on_http_error is True and 400 <= response.status_code <= 599)
            or (isinstance(self.retry_on_http_error, list) and response.status_code in self.retry_on_http_error)
            or (isinstance(self.retry_on_http_error, int) and response.status_code == self.retry_on_http_error)
        ):
            return False
        return True

    async def call_client(self, stream, method, url, **kwargs):
        retries = self.retries
        was_disconnected = False
        kwargs_clients = Network.extract_kwargs_clients(kwargs)
        while retries >= 0:  # pragma: no cover
            client = await self.get_client(**kwargs_clients)
            try:
                if stream:
                    response = client.stream(method, url, **kwargs)
                else:
                    response = await client.request(method, url, **kwargs)
                if self.is_valid_response(response) or retries <= 0:
                    return response
            except httpx.RemoteProtocolError as e:
                if not was_disconnected:
                    # the server has closed the connection:
                    # try again without decreasing the retries variable & with a new HTTP client
                    was_disconnected = True
                    await client.aclose()
                    self._logger.warning('httpx.RemoteProtocolError: the server has disconnected, retrying')
                    continue
                if retries <= 0:
                    raise e
            except (httpx.RequestError, httpx.HTTPStatusError) as e:
                if retries <= 0:
                    raise e
            retries -= 1

    async def request(self, method, url, **kwargs):
        return await self.call_client(False, method, url, **kwargs)

    async def stream(self, method, url, **kwargs):
        return await self.call_client(True, method, url, **kwargs)

    @classmethod
    async def aclose_all(cls):
        await asyncio.gather(*[network.aclose() for network in NETWORKS.values()], return_exceptions=False)


def get_network(name=None):
    return NETWORKS.get(name or DEFAULT_NAME)


def check_network_configuration():
    async def check():
        exception_count = 0
        for network in NETWORKS.values():
            if network.using_tor_proxy:
                try:
                    await network.get_client()
                except Exception:  # pylint: disable=broad-except
                    network._logger.exception('Error')  # pylint: disable=protected-access
                    exception_count += 1
        return exception_count

    future = asyncio.run_coroutine_threadsafe(check(), get_loop())
    exception_count = future.result()
    if exception_count > 0:
        raise RuntimeError("Invalid network configuration")


def initialize(settings_engines=None, settings_outgoing=None):
    # pylint: disable=import-outside-toplevel)
    from searx.engines import engines
    from searx import settings

    # pylint: enable=import-outside-toplevel)

    settings_engines = settings_engines or settings['engines']
    settings_outgoing = settings_outgoing or settings['outgoing']

    # default parameters for AsyncHTTPTransport
    # see https://github.com/encode/httpx/blob/e05a5372eb6172287458b37447c30f650047e1b8/httpx/_transports/default.py#L108-L121  # nopep8
    default_params = {
        'enable_http': False,
        'verify': True,
        'enable_http2': settings_outgoing.get('enable_http2', True),
        'max_connections': settings_outgoing.get('pool_connections', 100),
        'max_keepalive_connections': settings_outgoing.get('pool_maxsize', 10),
        'keepalive_expiry': settings_outgoing.get('keepalive_expiry', 5.0),
        'local_addresses': settings_outgoing.get('source_ips', []),
        'using_tor_proxy': settings_outgoing.get('using_tor_proxy', False),
        'proxies': settings_outgoing.get('proxies', None),
        'max_redirects': settings_outgoing.get('max_redirects', 30),
        'retries': settings_outgoing.get('retries', 0),
        'retry_on_http_error': None,
    }

    def new_network(params, logger_name=None):
        nonlocal default_params
        result = {}
        result.update(default_params)
        result.update(params)
        if logger_name:
            result['logger_name'] = logger_name
        return Network(**result)

    def iter_networks():
        nonlocal settings_engines
        for engine_spec in settings_engines:
            engine_name = engine_spec['name']
            engine = engines.get(engine_name)
            if engine is None:
                continue
            network = getattr(engine, 'network', None)
            yield engine_name, engine, network

    if NETWORKS:
        done()
    NETWORKS.clear()
    NETWORKS[DEFAULT_NAME] = new_network({}, logger_name='default')
    NETWORKS['ipv4'] = new_network({'local_addresses': '0.0.0.0'}, logger_name='ipv4')
    NETWORKS['ipv6'] = new_network({'local_addresses': '::'}, logger_name='ipv6')

    # define networks from outgoing.networks
    for network_name, network in settings_outgoing.get('networks', {}).items():
        NETWORKS[network_name] = new_network(network, logger_name=network_name)

    # define networks from engines.[i].network (except references)
    for engine_name, engine, network in iter_networks():
        if network is None:
            network = {}
            for attribute_name, attribute_value in default_params.items():
                if hasattr(engine, attribute_name):
                    network[attribute_name] = getattr(engine, attribute_name)
                else:
                    network[attribute_name] = attribute_value
            NETWORKS[engine_name] = new_network(network, logger_name=engine_name)
        elif isinstance(network, dict):
            NETWORKS[engine_name] = new_network(network, logger_name=engine_name)

    # define networks from engines.[i].network (references)
    for engine_name, engine, network in iter_networks():
        if isinstance(network, str):
            NETWORKS[engine_name] = NETWORKS[network]

    # the /image_proxy endpoint has a dedicated network.
    # same parameters as the default network, but HTTP/2 is disabled.
    # It decreases the CPU load average, and the total time is more or less the same
    if 'image_proxy' not in NETWORKS:
        image_proxy_params = default_params.copy()
        image_proxy_params['enable_http2'] = False
        NETWORKS['image_proxy'] = new_network(image_proxy_params, logger_name='image_proxy')


@atexit.register
def done():
    """Close all HTTP clients

    Avoid a warning at exit
    see https://github.com/encode/httpx/blob/1a6e254f72d9fd5694a1c10a28927e193ab4f76b/httpx/_client.py#L1785

    Note: since Network.aclose has to be async, it is not possible to call this method on Network.__del__
    So Network.aclose is called here using atexit.register
    """
    try:
        loop = get_loop()
        if loop:
            future = asyncio.run_coroutine_threadsafe(Network.aclose_all(), loop)
            # wait 3 seconds to close the HTTP clients
            future.result(3)
    finally:
        NETWORKS.clear()


NETWORKS[DEFAULT_NAME] = Network()

@@ -34,6 +34,7 @@ from searx.plugins import (oa_doi_rewrite,
                           self_info,
                           hostname_replace,
                           search_on_category_select,
                           search_operators,
                           tracker_url_remover,
                           vim_hotkeys)

@@ -171,8 +172,10 @@ plugins.register(infinite_scroll)
plugins.register(self_info)
plugins.register(hostname_replace)
plugins.register(search_on_category_select)
plugins.register(search_operators)
plugins.register(tracker_url_remover)
plugins.register(vim_hotkeys)

# load external plugins
if 'plugins' in settings:
    plugins.register(*settings['plugins'], external=True)

@@ -0,0 +1,32 @@
import shlex
import string

from flask_babel import gettext

name = gettext("Search operators")
description = gettext("""Filter results using hyphen, site: and -site:.
Please note that you might get fewer results with the additional filtering.""")
default_on = False


def on_result(request, search, result):
    q = search.search_query.query
    qs = shlex.split(q)
    spitems = [x.lower() for x in qs if ' ' in x]
    mitems = [x.lower() for x in qs if x.startswith('-')]
    siteitems = [x.lower() for x in qs if x.startswith('site:')]
    msiteitems = [x.lower() for x in qs if x.startswith('-site:')]
    url, title, content = (
        result["url"].lower(),
        result["title"].lower(),
        (result.get("content").lower() if result.get("content") else '')
    )
    if all((x not in title or x not in content) for x in spitems):
        return False
    if all((x in title or x in content) for x in mitems):
        return False
    if all(x not in url for x in siteitems):
        return False
    if all(x in url for x in msiteitems):
        return False
    return True

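One hedged note on the tokenizer choice above: shlex.split keeps quoted phrases together as single tokens, which is what the spitems check (tokens containing a space) relies on; the query string below is made up:

    import shlex
    shlex.split('"exact phrase" -site:example.org term')
    # -> ['exact phrase', '-site:example.org', 'term']
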
@@ -0,0 +1,235 @@
import sys
from time import time
from itertools import cycle
from threading import local

import requests

from searx import settings
from searx import logger
from searx.raise_for_httperror import raise_for_httperror


logger = logger.getChild('poolrequests')


try:
    import ssl
    if ssl.OPENSSL_VERSION_INFO[0:3] < (1, 0, 2):
        # https://github.com/certifi/python-certifi#1024-bit-root-certificates
        logger.critical('You are using an old openssl version({0}), please upgrade above 1.0.2!'
                        .format(ssl.OPENSSL_VERSION))
        sys.exit(1)
except ImportError:
    ssl = None
if not getattr(ssl, "HAS_SNI", False):
    try:
        import OpenSSL  # pylint: disable=unused-import
    except ImportError:
        logger.critical("ssl doesn't support SNI and the pyopenssl module is not installed.\n"
                        "Some HTTPS connections will fail")
        sys.exit(1)


class HTTPAdapterWithConnParams(requests.adapters.HTTPAdapter):

    def __init__(self, pool_connections=requests.adapters.DEFAULT_POOLSIZE,
                 pool_maxsize=requests.adapters.DEFAULT_POOLSIZE,
                 max_retries=requests.adapters.DEFAULT_RETRIES,
                 pool_block=requests.adapters.DEFAULT_POOLBLOCK,
                 **conn_params):
        if max_retries == requests.adapters.DEFAULT_RETRIES:
            self.max_retries = requests.adapters.Retry(0, read=False)
        else:
            self.max_retries = requests.adapters.Retry.from_int(max_retries)
        self.config = {}
        self.proxy_manager = {}

        super().__init__()

        self._pool_connections = pool_connections
        self._pool_maxsize = pool_maxsize
        self._pool_block = pool_block
        self._conn_params = conn_params

        self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block, **conn_params)

    def __setstate__(self, state):
        # Can't handle by adding 'proxy_manager' to self.__attrs__
        # because self.poolmanager uses a lambda function, which isn't pickleable.
        self.proxy_manager = {}
        self.config = {}

        for attr, value in state.items():
            setattr(self, attr, value)

        self.init_poolmanager(self._pool_connections, self._pool_maxsize,
                              block=self._pool_block, **self._conn_params)


threadLocal = local()
connect = settings['outgoing'].get('pool_connections', 100)  # Magic number kept from previous code
maxsize = settings['outgoing'].get('pool_maxsize', requests.adapters.DEFAULT_POOLSIZE)  # Picked from constructor
if settings['outgoing'].get('source_ips'):
    http_adapters = cycle(HTTPAdapterWithConnParams(pool_connections=connect, pool_maxsize=maxsize,
                                                    source_address=(source_ip, 0))
                          for source_ip in settings['outgoing']['source_ips'])
    https_adapters = cycle(HTTPAdapterWithConnParams(pool_connections=connect, pool_maxsize=maxsize,
                                                     source_address=(source_ip, 0))
                           for source_ip in settings['outgoing']['source_ips'])
else:
    http_adapters = cycle((HTTPAdapterWithConnParams(pool_connections=connect, pool_maxsize=maxsize), ))
    https_adapters = cycle((HTTPAdapterWithConnParams(pool_connections=connect, pool_maxsize=maxsize), ))


class SessionSinglePool(requests.Session):

    def __init__(self):
        super().__init__()

        # reuse the same adapters
        self.adapters.clear()

        https_adapter = threadLocal.__dict__.setdefault('https_adapter', next(https_adapters))
        self.mount('https://', https_adapter)
        if get_enable_http_protocol():
            http_adapter = threadLocal.__dict__.setdefault('http_adapter', next(http_adapters))
            self.mount('http://', http_adapter)

    def close(self):
        """Call super, but clear adapters since they are managed globally"""
        self.adapters.clear()
        super().close()


def set_timeout_for_thread(timeout, start_time=None):
    threadLocal.timeout = timeout
    threadLocal.start_time = start_time


def set_enable_http_protocol(enable_http):
    threadLocal.enable_http = enable_http


def get_enable_http_protocol():
    try:
        return threadLocal.enable_http
    except AttributeError:
        return False


def reset_time_for_thread():
    threadLocal.total_time = 0


def get_time_for_thread():
    return threadLocal.total_time


def get_proxy_cycles(proxy_settings):
    if not proxy_settings:
        return None
    # Backwards compatibility for single proxy in settings.yml
    for protocol, proxy in proxy_settings.items():
        if isinstance(proxy, str):
            proxy_settings[protocol] = [proxy]

    for protocol in proxy_settings:
        proxy_settings[protocol] = cycle(proxy_settings[protocol])
    return proxy_settings


GLOBAL_PROXY_CYCLES = get_proxy_cycles(settings['outgoing'].get('proxies'))


def get_proxies(proxy_cycles):
    if proxy_cycles:
        return {protocol: next(proxy_cycle) for protocol, proxy_cycle in proxy_cycles.items()}
    return None


def get_global_proxies():
    return get_proxies(GLOBAL_PROXY_CYCLES)

def request(method, url, **kwargs):
|
||||
"""same as requests/requests/api.py request(...)"""
|
||||
time_before_request = time()
|
||||
|
||||
# session start
|
||||
session = SessionSinglePool()
|
||||
|
||||
# proxies
|
||||
if not kwargs.get('proxies'):
|
||||
kwargs['proxies'] = get_global_proxies()
|
||||
|
||||
# timeout
|
||||
if 'timeout' in kwargs:
|
||||
timeout = kwargs['timeout']
|
||||
else:
|
||||
timeout = getattr(threadLocal, 'timeout', None)
|
||||
if timeout is not None:
|
||||
kwargs['timeout'] = timeout
|
||||
|
||||
# raise_for_error
|
||||
check_for_httperror = True
|
||||
if 'raise_for_httperror' in kwargs:
|
||||
check_for_httperror = kwargs['raise_for_httperror']
|
||||
del kwargs['raise_for_httperror']
|
||||
|
||||
# do request
|
||||
response = session.request(method=method, url=url, **kwargs)
|
||||
|
||||
time_after_request = time()
|
||||
|
||||
# is there a timeout for this engine ?
|
||||
if timeout is not None:
|
||||
timeout_overhead = 0.2 # seconds
|
||||
# start_time = when the user request started
|
||||
start_time = getattr(threadLocal, 'start_time', time_before_request)
|
||||
search_duration = time_after_request - start_time
|
||||
if search_duration > timeout + timeout_overhead:
|
||||
raise requests.exceptions.Timeout(response=response)
|
||||
|
||||
# session end
|
||||
session.close()
|
||||
|
||||
if hasattr(threadLocal, 'total_time'):
|
||||
threadLocal.total_time += time_after_request - time_before_request
|
||||
|
||||
# raise an exception
|
||||
if check_for_httperror:
|
||||
raise_for_httperror(response)
|
||||
|
||||
return response
|
||||
|
||||
|
||||
def get(url, **kwargs):
|
||||
kwargs.setdefault('allow_redirects', True)
|
||||
return request('get', url, **kwargs)
|
||||
|
||||
|
||||
def options(url, **kwargs):
|
||||
kwargs.setdefault('allow_redirects', True)
|
||||
return request('options', url, **kwargs)
|
||||
|
||||
|
||||
def head(url, **kwargs):
|
||||
kwargs.setdefault('allow_redirects', False)
|
||||
return request('head', url, **kwargs)
|
||||
|
||||
|
||||
def post(url, data=None, **kwargs):
|
||||
return request('post', url, data=data, **kwargs)
|
||||
|
||||
|
||||
def put(url, data=None, **kwargs):
|
||||
return request('put', url, data=data, **kwargs)
|
||||
|
||||
|
||||
def patch(url, data=None, **kwargs):
|
||||
return request('patch', url, data=data, **kwargs)
|
||||
|
||||
|
||||
def delete(url, **kwargs):
|
||||
return request('delete', url, **kwargs)
|
|
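To make the thread-local plumbing above concrete, a minimal usage sketch (the URL and the two proxy addresses are made up; everything else is the module's interface as defined above):

from time import time
import searx.poolrequests as poolrequests

# Per-thread bookkeeping: request() compares elapsed time against this
# timeout (plus a 0.2 s overhead) and raises requests.exceptions.Timeout.
poolrequests.set_timeout_for_thread(3.0, start_time=time())
poolrequests.reset_time_for_thread()

response = poolrequests.get('https://example.org/search?q=searx')  # hypothetical URL
print(response.status_code, poolrequests.get_time_for_thread())

# get_proxies() walks each protocol's itertools.cycle, so configured
# proxies are handed out round-robin, one per outgoing request:
cycles = poolrequests.get_proxy_cycles({'http': ['http://proxy1:8080', 'http://proxy2:8080']})
assert poolrequests.get_proxies(cycles) == {'http': 'http://proxy1:8080'}
assert poolrequests.get_proxies(cycles) == {'http': 'http://proxy2:8080'}
assert poolrequests.get_proxies(cycles) == {'http': 'http://proxy1:8080'}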
@@ -225,11 +225,9 @@ class SwitchableSetting(Setting):
            raise MissingArgumentException('missing argument: choices')

    def transform_form_items(self, items):  # pylint: disable=missing-function-docstring
        # pylint: disable=no-self-use
        return items

    def transform_values(self, values):  # pylint: disable=missing-function-docstring
        # pylint: disable=no-self-use
        return values

    def parse_cookie(self, data):  # pylint: disable=missing-function-docstring
@@ -346,6 +344,26 @@ class Preferences:
                is_locked('autocomplete'),
                choices=list(autocomplete.backends.keys()) + ['']
            ),
            'autofocus': MapSetting(
                settings['ui'].get('autofocus', True),
                is_locked('autofocus'),
                map={
                    '0': False,
                    '1': True,
                    'False': False,
                    'True': True
                }
            ),
            'archive_today': MapSetting(
                settings['ui'].get('archive_today', True),
                is_locked('archive_today'),
                map={
                    '0': False,
                    '1': True,
                    'False': False,
                    'True': True
                }
            ),
            'image_proxy': MapSetting(
                settings['server'].get('image_proxy', False),
                is_locked('image_proxy'),
@@ -25,13 +25,11 @@ from _thread import start_new_thread
from searx import settings
from searx.answerers import ask
from searx.external_bang import get_bang_url
from searx.engines import load_engines
from searx.results import ResultContainer
from searx import logger
from searx.plugins import plugins
from searx.search.models import EngineRef, SearchQuery
from searx.search.processors import PROCESSORS, initialize as initialize_processors
from searx.network import check_network_configuration, initialize as initialize_network
from searx.search.processors import processors, initialize as initialize_processors
from searx.search.checker import initialize as initialize_checker


@@ -49,14 +47,9 @@ else:
    sys.exit(1)


def initialize(settings_engines=None, enable_checker=False, check_network=False):
def initialize(settings_engines=None, enable_checker=False):
    settings_engines = settings_engines or settings['engines']
    load_engines(settings_engines)
    initialize_network(settings_engines, settings['outgoing'])
    if check_network:
        check_network_configuration()
    initialize_processors(settings_engines)

    if enable_checker:
        initialize_checker()
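A minimal sketch of how the merged initialize() is meant to be driven (the flag value is illustrative; the engine list defaults to settings['engines'] when omitted):

from searx import settings
import searx.search

# Load the configured engines, build one processor per engine, and
# optionally start the background checker.
searx.search.initialize(settings['engines'], enable_checker=True)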
@@ -111,7 +104,7 @@ class Search:

        # start search-request for all selected engines
        for engineref in self.search_query.engineref_list:
            processor = PROCESSORS[engineref.name]
            processor = processors[engineref.name]

            # set default request parameters
            request_params = processor.get_params(self.search_query, engineref.category)

@@ -154,7 +147,7 @@ class Search:

        for engine_name, query, request_params in requests:
            th = threading.Thread(
                target=PROCESSORS[engine_name].search,
                target=processors[engine_name].search,
                args=(query, request_params, self.result_container, self.start_time, self.actual_timeout),
                name=search_id,
            )
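Schematically, the fan-out above boils down to one thread per engine, all writing into a shared result container; a reduced sketch (run_engines and its arguments are made up for illustration):

import threading

def run_engines(searches, timeout_limit):
    # searches: iterable of (target, args) pairs, one per selected engine.
    threads = []
    for target, args in searches:
        th = threading.Thread(target=target, args=args, daemon=True)
        th.start()
        threads.append(th)
    for th in threads:
        th.join(timeout_limit)  # bounded wait, mirroring actual_timeout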
@@ -9,7 +9,7 @@ import signal

from searx import logger, settings, searx_debug
from searx.exceptions import SearxSettingsException
from searx.search.processors import PROCESSORS
from searx.search.processors import processors
from searx.search.checker import Checker
from searx.shared import schedule, storage


@@ -55,7 +55,7 @@ def run():
        'status': 'ok',
        'engines': {}
    }
    for name, processor in PROCESSORS.items():
    for name, processor in processors.items():
        logger.debug('Checking %s engine', name)
        checker = Checker(processor)
        checker.run()
@@ -11,9 +11,9 @@ from urllib.parse import urlparse
import re
from langdetect import detect_langs
from langdetect.lang_detect_exception import LangDetectException
import httpx
import requests.exceptions

from searx import network, logger
from searx import poolrequests, logger
from searx.results import ResultContainer
from searx.search.models import SearchQuery, EngineRef
from searx.search.processors import EngineProcessor

@@ -75,8 +75,8 @@ def _is_url_image(image_url):
    while retry > 0:
        a = time()
        try:
            network.set_timeout_for_thread(10.0, time())
            r = network.get(image_url, timeout=10.0, follow_redirects=True, headers={
            poolrequests.set_timeout_for_thread(10.0, time())
            r = poolrequests.get(image_url, timeout=10.0, allow_redirects=True, headers={
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:84.0) Gecko/20100101 Firefox/84.0',
                'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
                'Accept-Language': 'en-US;q=0.5,en;q=0.3',

@@ -90,10 +90,10 @@ def _is_url_image(image_url):
            if r.headers["content-type"].startswith('image/'):
                return True
            return False
        except httpx.TimeoutException:
        except requests.exceptions.Timeout:
            logger.error('Timeout for %s: %i', image_url, int(time() - a))
            retry -= 1
        except httpx.HTTPError:
        except requests.exceptions.RequestException:
            logger.exception('Exception for %s', image_url)
            return False
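Reduced to its core, the check above fetches the URL with the requests API and tests the Content-Type header; a self-contained sketch (looks_like_image is a made-up name):

import requests

def looks_like_image(url, timeout=10.0):
    try:
        # allow_redirects=True is the requests spelling of httpx's
        # follow_redirects=True, which is why the hunk above swaps it.
        r = requests.get(url, timeout=timeout, allow_redirects=True)
    except requests.exceptions.RequestException:
        return False
    return r.headers.get('content-type', '').startswith('image/')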
@@ -1,7 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-or-later

import threading

from .online import OnlineProcessor
from .offline import OfflineProcessor
from .online_dictionary import OnlineDictionaryProcessor

@@ -12,9 +10,9 @@ import searx.engines as engines


__all__ = ['EngineProcessor', 'OfflineProcessor', 'OnlineProcessor',
           'OnlineDictionaryProcessor', 'OnlineCurrencyProcessor', 'PROCESSORS']
           'OnlineDictionaryProcessor', 'OnlineCurrencyProcessor', 'processors']
logger = logger.getChild('search.processors')
PROCESSORS = {}
processors = {}


def get_processor_class(engine_type):

@@ -29,27 +27,15 @@ def get_processor(engine, engine_name):
    processor_class = get_processor_class(engine_type)
    if processor_class:
        return processor_class(engine, engine_name)
    return None


def initialize_processor(processor):
    """Initialize one processor
    Call the init function of the engine
    """
    if processor.has_initialize_function:
        t = threading.Thread(target=processor.initialize, daemon=True)
        t.start()
    else:
        return None


def initialize(engine_list):
    """Initialize all engines and store a processor for each engine in :py:obj:`PROCESSORS`."""
    for engine_data in engine_list:
        engine_name = engine_data['name']
        engine = engines.engines.get(engine_name)
        if engine:
            processor = get_processor(engine, engine_name)
            initialize_processor(processor)
            if processor is None:
                engine.logger.error('Error get processor for engine %s', engine_name)
            else:
                PROCESSORS[engine_name] = processor
    engines.initialize_engines(engine_list)
    for engine_name, engine in engines.engines.items():
        processor = get_processor(engine, engine_name)
        if processor is None:
            logger.error('Error get processor for engine %s', engine_name)
        else:
            processors[engine_name] = processor
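For orientation, a small sketch of the dispatch initialize() now performs per engine (DummyEngine is made up; get_processor() is assumed to pick the processor class from the engine's engine_type attribute, as the hunk above suggests):

from searx.search.processors import get_processor, processors

class DummyEngine:  # made-up engine object, just enough for the dispatch
    engine_type = 'online'

processor = get_processor(DummyEngine(), 'dummy')
print(type(processor).__name__)   # expected: OnlineProcessor
processors['dummy'] = processor   # what initialize() stores per engine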
@@ -2,32 +2,17 @@

from abc import abstractmethod, ABC
from searx import logger
from searx.engines import engines
from searx.utils import get_engine_from_settings


logger = logger.getChild('searx.search.processor')


class EngineProcessor(ABC):

    def __init__(self, engine, engine_name):
        self.engine = engine
        self.engine_name = engine_name

    def initialize(self):
        try:
            self.engine.init(get_engine_from_settings(self.engine_name))
        except SearxEngineResponseException as exc:
            logger.warn('Fail to initialize %s // %s', self.engine_name, exc)
        except Exception:  # pylint: disable=broad-except
            logger.exception('Fail to initialize %s', self.engine_name)
        else:
            logger.debug('Initialized %s', self.engine_name)

    @property
    def has_initialize_function(self):
        return hasattr(self.engine, 'init')

    def get_params(self, search_query, engine_category):
        # if paging is not supported, skip
        if search_query.pageno > 1 and not self.engine.paging:
@@ -1,12 +1,12 @@
# SPDX-License-Identifier: AGPL-3.0-or-later

from urllib.parse import urlparse
from time import time
import threading
import asyncio

import httpx
import requests.exceptions

import searx.network
import searx.poolrequests as poolrequests
from searx.engines import settings
from searx import logger
from searx.utils import gen_useragent

@@ -64,6 +64,10 @@ class OnlineProcessor(EngineProcessor):
            auth=params['auth']
        )

        # setting engine based proxies
        if hasattr(self.engine, 'proxies'):
            request_args['proxies'] = poolrequests.get_proxies(self.engine.proxies)

        # max_redirects
        max_redirects = params.get('max_redirects')
        if max_redirects:

@@ -82,9 +86,9 @@ class OnlineProcessor(EngineProcessor):

        # specific type of request (GET or POST)
        if params['method'] == 'GET':
            req = searx.network.get
            req = poolrequests.get
        else:
            req = searx.network.post
            req = poolrequests.post

        request_args['data'] = params['data']


@@ -96,8 +100,8 @@ class OnlineProcessor(EngineProcessor):
            # unexpected redirect : record an error
            # but the engine might still return valid results.
            status_code = str(response.status_code or '')
            reason = response.reason_phrase or ''
            hostname = response.url.host
            reason = response.reason or ''
            hostname = str(urlparse(response.url or '').netloc)
            record_error(self.engine_name,
                         '{} redirects, maximum: {}'.format(len(response.history), soft_max_redirects),
                         (status_code, reason, hostname))
@@ -125,14 +129,14 @@ class OnlineProcessor(EngineProcessor):

    def search(self, query, params, result_container, start_time, timeout_limit):
        # set timeout for all HTTP requests
        searx.network.set_timeout_for_thread(timeout_limit, start_time=start_time)
        poolrequests.set_timeout_for_thread(timeout_limit, start_time=start_time)
        # reset the HTTP total time
        searx.network.reset_time_for_thread()
        # set the network
        searx.network.set_context_network_name(self.engine_name)
        poolrequests.reset_time_for_thread()
        # enable HTTP only if explicitly enabled
        poolrequests.set_enable_http_protocol(self.engine.enable_http)

        # suppose everything will be alright
        http_exception = False
        requests_exception = False
        suspended_time = None

        try:

@@ -146,7 +150,7 @@ class OnlineProcessor(EngineProcessor):

            # update engine time when there is no exception
            engine_time = time() - start_time
            page_load_time = searx.network.get_time_for_thread()
            page_load_time = poolrequests.get_time_for_thread()
            result_container.add_timing(self.engine_name, engine_time, page_load_time)
            with threading.RLock():
                self.engine.stats['engine_time'] += engine_time

@@ -159,27 +163,27 @@ class OnlineProcessor(EngineProcessor):

            # Timing
            engine_time = time() - start_time
            page_load_time = searx.network.get_time_for_thread()
            page_load_time = poolrequests.get_time_for_thread()
            result_container.add_timing(self.engine_name, engine_time, page_load_time)

            # Record the errors
            with threading.RLock():
                self.engine.stats['errors'] += 1

            if (issubclass(e.__class__, (httpx.TimeoutException, asyncio.TimeoutError))):
            if (issubclass(e.__class__, requests.exceptions.Timeout)):
                result_container.add_unresponsive_engine(self.engine_name, 'HTTP timeout')
                # requests timeout (connect or read)
                logger.error("engine {0} : HTTP requests timeout"
                             "(search duration : {1} s, timeout: {2} s) : {3}"
                             .format(self.engine_name, engine_time, timeout_limit, e.__class__.__name__))
                http_exception = True
            elif (issubclass(e.__class__, (httpx.HTTPError, httpx.StreamError))):
                requests_exception = True
            elif (issubclass(e.__class__, requests.exceptions.RequestException)):
                result_container.add_unresponsive_engine(self.engine_name, 'HTTP error')
                # other requests exception
                logger.exception("engine {0} : requests exception"
                                 "(search duration : {1} s, timeout: {2} s) : {3}"
                                 .format(self.engine_name, engine_time, timeout_limit, e))
                http_exception = True
                requests_exception = True
            elif (issubclass(e.__class__, SearxEngineCaptchaException)):
                result_container.add_unresponsive_engine(self.engine_name, 'CAPTCHA required')
                logger.exception('engine {0} : CAPTCHA'.format(self.engine_name))

@@ -203,7 +207,7 @@ class OnlineProcessor(EngineProcessor):
        # suspend the engine if there is an HTTP error
        # or suspended_time is defined
        with threading.RLock():
            if http_exception or suspended_time:
            if requests_exception or suspended_time:
                # update continuous_errors / suspend_end_time
                self.engine.continuous_errors += 1
                if suspended_time is None:
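The exception triage this hunk rewrites (httpx names out, requests names in) reduces to a small mapping; a simplified sketch:

import requests.exceptions

def classify_engine_error(exc):
    # Mirrors the branches above: timeouts and other request errors get
    # distinct 'unresponsive engine' reasons; anything else is a crash.
    if isinstance(exc, requests.exceptions.Timeout):
        return 'HTTP timeout'
    if isinstance(exc, requests.exceptions.RequestException):
        return 'HTTP error'
    return 'crash'

print(classify_engine_error(requests.exceptions.ConnectTimeout()))  # HTTP timeout
print(classify_engine_error(requests.exceptions.SSLError()))        # HTTP error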
@@ -5,13 +5,12 @@ import re
from searx.utils import is_valid_lang
from .online import OnlineProcessor


parser_re = re.compile('.*?([a-z]+)-([a-z]+) ([^ ]+)$', re.I)
parser_re = re.compile('.*?([a-z]+)-([a-z]+) (.+)$', re.I)


class OnlineDictionaryProcessor(OnlineProcessor):

    engine_type = 'online_dictionnary'
    engine_type = 'online_dictionary'

    def get_params(self, search_query, engine_category):
        params = super().get_params(search_query, engine_category)
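The widened third group is the point of the regex change: the old pattern rejected multi-word dictionary queries. A quick illustration:

import re

old_re = re.compile('.*?([a-z]+)-([a-z]+) ([^ ]+)$', re.I)
new_re = re.compile('.*?([a-z]+)-([a-z]+) (.+)$', re.I)

print(old_re.match('en-fr hello').groups())         # ('en', 'fr', 'hello')
print(old_re.match('en-fr good morning'))           # None: last group forbade spaces
print(new_re.match('en-fr good morning').groups())  # ('en', 'fr', 'good morning')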
@@ -37,6 +37,8 @@ server:
        Referrer-Policy : no-referrer

ui:
    autofocus : True # Autofocus search input
    archive_today : False # show archive.today links
    static_path : "" # Custom static path - leave it blank if you didn't change
    templates_path : "" # Custom templates path - leave it blank if you didn't change
    default_theme : oscar # ui theme

@@ -69,17 +71,19 @@ ui:
#    key : !!binary "your_morty_proxy_key"

outgoing: # communication with search engines
    request_timeout : 3.0 # default timeout in seconds, can be overridden by engine
    request_timeout : 2.0 # default timeout in seconds, can be overridden by engine
    # max_request_timeout: 10.0 # the maximum timeout in seconds
    useragent_suffix : "" # suffix of searx_useragent, could contain information such as an email address for the administrator
    pool_connections : 100 # The maximum number of concurrent connections that may be established.
    pool_maxsize : 20 # Allow the connection pool to maintain keep-alive connections below this point.
    enable_http2: True # See https://www.python-httpx.org/http2/
    pool_connections : 100 # Number of different hosts
    pool_maxsize : 10 # Number of simultaneous requests by host
    # uncomment below section if you want to use a proxy
    # see https://2.python-requests.org/en/latest/user/advanced/#proxies
    # SOCKS proxies are also supported: see https://2.python-requests.org/en/latest/user/advanced/#socks
#    proxies:
#        all://:
#        http:
#            - http://proxy1:8080
#            - http://proxy2:8080
#        https:
#            - http://proxy1:8080
#            - http://proxy2:8080
#    using_tor_proxy : True

@@ -89,7 +93,6 @@ outgoing: # communication with search engines
#  source_ips:
#    - 1.1.1.1
#    - 1.1.1.2
#    - fe80::/126

# External plugin configuration
# See https://searx.github.io/searx/dev/plugins.html for more details
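The two pool_* values map directly onto the arguments of requests' HTTPAdapter; a minimal sketch of what the merged comments describe (values are the merged defaults):

import requests

# pool_connections: how many distinct hosts keep their own connection pool.
# pool_maxsize: how many keep-alive connections each of those pools retains.
adapter = requests.adapters.HTTPAdapter(pool_connections=100, pool_maxsize=10)

session = requests.Session()
session.mount('https://', adapter)
session.mount('http://', adapter)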
@@ -378,6 +381,12 @@ engines:
    require_api_key: false
    results: HTML

  - name: emojipedia
    engine: emojipedia
    timeout: 4.0
    shortcut: em
    disabled: True

#  - name : elasticsearch
#    shortcut : es
#    engine : elasticsearch

@@ -787,17 +796,23 @@ engines:
    shortcut : loc
    categories : images

  - name : lobste.rs
    engine : xpath
    search_url : https://lobste.rs/search?utf8=%E2%9C%93&q={query}&what=stories&order=relevance
    results_xpath : //li[contains(@class, "story")]
    url_xpath : .//a[@class="u-url"]/@href
    title_xpath : .//a[@class="u-url"]
    content_xpath : .//a[@class="domain"]
    categories : it
    shortcut : lo
    timeout : 5.0
    disabled: True
  - name: lingva
    engine: lingva
    shortcut: lv
    # set lingva instance in url, by default it will use the official instance
    # url: https://lingva.ml

  - name: lobste.rs
    engine: xpath
    search_url: https://lobste.rs/search?utf8=%E2%9C%93&q={query}&what=stories&order=relevance
    results_xpath: //li[contains(@class, "story")]
    url_xpath: .//a[@class="u-url"]/@href
    title_xpath: .//a[@class="u-url"]
    content_xpath: .//a[@class="domain"]
    categories: it
    shortcut: lo
    timeout: 5.0
    disabled: true
    about:
      website: https://lobste.rs/
      wikidata_id: Q60762874

@@ -1027,18 +1042,16 @@ engines:
    additional_tests:
      rosebud: *test_rosebud

  - name : qwant images
    engine : qwant
    shortcut : qwi
    disabled: True
    categories : images

  - name : qwant news
    engine : qwant
    shortcut : qwn
    categories : news
    network: qwant

  - name: qwant images
    engine: qwant
    shortcut: qwi
    categories: images
    disabled: True
    network: qwant

  - name: qwant videos
    engine: qwant

@@ -1634,7 +1647,7 @@ engines:
    require_api_key: false
    results: HTML

  - name: słownik języka polskiego
  - name: sjp.pwn
    engine: sjp
    shortcut: sjp
    base_url: https://sjp.pwn.pl/
@@ -45,7 +45,10 @@
}
/* .leaflet-container svg: reset svg max-width declaration shipped in Joomla! (joomla.org) 3.x */
/* .leaflet-container img: map is broken in FF if you have max-width: 100% on tiles */
.leaflet-container .leaflet-overlay-pane svg,
.leaflet-container .leaflet-overlay-pane svg {
    max-width: none !important;
    max-height: none !important;
}
.leaflet-container .leaflet-marker-pane img,
.leaflet-container .leaflet-shadow-pane img,
.leaflet-container .leaflet-tile-pane img,

@@ -53,6 +56,8 @@
.leaflet-container .leaflet-tile {
    max-width: none !important;
    max-height: none !important;
    width: auto;
    padding: 0;
}

.leaflet-container.leaflet-touch-zoom {

@@ -166,9 +171,6 @@

/* zoom and fade animations */

.leaflet-fade-anim .leaflet-tile {
    will-change: opacity;
}
.leaflet-fade-anim .leaflet-popup {
    opacity: 0;
    -webkit-transition: opacity 0.2s linear;

@@ -183,9 +185,10 @@
    -ms-transform-origin: 0 0;
    transform-origin: 0 0;
}
.leaflet-zoom-anim .leaflet-zoom-animated {
svg.leaflet-zoom-animated {
    will-change: transform;
}
}

.leaflet-zoom-anim .leaflet-zoom-animated {
    -webkit-transition: -webkit-transform 0.25s cubic-bezier(0,0,0.25,1);
    -moz-transition: -moz-transform 0.25s cubic-bezier(0,0,0.25,1);

@@ -251,14 +254,11 @@ svg.leaflet-image-layer.leaflet-interactive path {

.leaflet-container {
    background: #ddd;
    outline: 0;
    outline-offset: 1px;
}
.leaflet-container a {
    color: #0078A8;
}
.leaflet-container a.leaflet-active {
    outline: 2px solid orange;
}
.leaflet-zoom-box {
    border: 2px dotted #38f;
    background: rgba(255,255,255,0.5);

@@ -267,7 +267,10 @@ svg.leaflet-image-layer.leaflet-interactive path {

/* general typography */
.leaflet-container {
    font: 12px/1.5 "Helvetica Neue", Arial, Helvetica, sans-serif;
    font-family: "Helvetica Neue", Arial, Helvetica, sans-serif;
    font-size: 12px;
    font-size: 0.75rem;
    line-height: 1.5;
}


@@ -277,8 +280,7 @@ svg.leaflet-image-layer.leaflet-interactive path {
    box-shadow: 0 1px 5px rgba(0,0,0,0.65);
    border-radius: 4px;
}
.leaflet-bar a,
.leaflet-bar a:hover {
.leaflet-bar a {
    background-color: #fff;
    border-bottom: 1px solid #ccc;
    width: 26px;

@@ -295,7 +297,8 @@ svg.leaflet-image-layer.leaflet-interactive path {
    background-repeat: no-repeat;
    display: block;
}
.leaflet-bar a:hover {
.leaflet-bar a:hover,
.leaflet-bar a:focus {
    background-color: #f4f4f4;
}
.leaflet-bar a:first-child {

@@ -385,6 +388,8 @@ svg.leaflet-image-layer.leaflet-interactive path {
}
.leaflet-control-layers label {
    display: block;
    font-size: 13px;
    font-size: 1.08333em;
}
.leaflet-control-layers-separator {
    height: 0;

@@ -393,7 +398,7 @@ svg.leaflet-image-layer.leaflet-interactive path {
}

/* Default icon URLs */
.leaflet-default-icon-path {
.leaflet-default-icon-path { /* used only in path-guessing heuristic, see L.Icon.Default */
    background-image: url(images/marker-icon.png);
}


@@ -402,23 +407,24 @@ svg.leaflet-image-layer.leaflet-interactive path {

.leaflet-container .leaflet-control-attribution {
    background: #fff;
    background: rgba(255, 255, 255, 0.7);
    background: rgba(255, 255, 255, 0.8);
    margin: 0;
}
.leaflet-control-attribution,
.leaflet-control-scale-line {
    padding: 0 5px;
    color: #333;
    line-height: 1.4;
}
.leaflet-control-attribution a {
    text-decoration: none;
}
.leaflet-control-attribution a:hover {
.leaflet-control-attribution a:hover,
.leaflet-control-attribution a:focus {
    text-decoration: underline;
}
.leaflet-container .leaflet-control-attribution,
.leaflet-container .leaflet-control-scale {
    font-size: 11px;
.leaflet-control-attribution svg {
    display: inline !important;
}
.leaflet-left .leaflet-control-scale {
    margin-left: 5px;

@@ -431,7 +437,6 @@ svg.leaflet-image-layer.leaflet-interactive path {
    border-top: none;
    line-height: 1.1;
    padding: 2px 5px 1px;
    font-size: 11px;
    white-space: nowrap;
    overflow: hidden;
    -moz-box-sizing: border-box;

@@ -474,17 +479,22 @@ svg.leaflet-image-layer.leaflet-interactive path {
    border-radius: 12px;
}
.leaflet-popup-content {
    margin: 13px 19px;
    line-height: 1.4;
    margin: 13px 24px 13px 20px;
    line-height: 1.3;
    font-size: 13px;
    font-size: 1.08333em;
    min-height: 1px;
}
.leaflet-popup-content p {
    margin: 18px 0;
    margin: 17px 0;
    margin: 1.3em 0;
}
.leaflet-popup-tip-container {
    width: 40px;
    height: 20px;
    position: absolute;
    left: 50%;
    margin-top: -1px;
    margin-left: -20px;
    overflow: hidden;
    pointer-events: none;

@@ -495,6 +505,7 @@ svg.leaflet-image-layer.leaflet-interactive path {
    padding: 1px;

    margin: -10px auto 0;
    pointer-events: auto;

    -webkit-transform: rotate(45deg);
    -moz-transform: rotate(45deg);

@@ -511,19 +522,18 @@ svg.leaflet-image-layer.leaflet-interactive path {
    position: absolute;
    top: 0;
    right: 0;
    padding: 4px 4px 0 0;
    border: none;
    text-align: center;
    width: 18px;
    height: 14px;
    font: 16px/14px Tahoma, Verdana, sans-serif;
    color: #c3c3c3;
    width: 24px;
    height: 24px;
    font: 16px/24px Tahoma, Verdana, sans-serif;
    color: #757575;
    text-decoration: none;
    font-weight: bold;
    background: transparent;
}
.leaflet-container a.leaflet-popup-close-button:hover {
    color: #999;
.leaflet-container a.leaflet-popup-close-button:hover,
.leaflet-container a.leaflet-popup-close-button:focus {
    color: #585858;
}
.leaflet-popup-scrolled {
    overflow: auto;

@@ -541,9 +551,6 @@ svg.leaflet-image-layer.leaflet-interactive path {
    -ms-filter: "progid:DXImageTransform.Microsoft.Matrix(M11=0.70710678, M12=0.70710678, M21=-0.70710678, M22=0.70710678)";
    filter: progid:DXImageTransform.Microsoft.Matrix(M11=0.70710678, M12=0.70710678, M21=-0.70710678, M22=0.70710678);
}
.leaflet-oldie .leaflet-popup-tip-container {
    margin-top: -1px;
}

.leaflet-oldie .leaflet-control-zoom,
.leaflet-oldie .leaflet-control-layers,

@@ -578,7 +585,7 @@ svg.leaflet-image-layer.leaflet-interactive path {
    pointer-events: none;
    box-shadow: 0 1px 3px rgba(0,0,0,0.4);
}
.leaflet-tooltip.leaflet-clickable {
.leaflet-tooltip.leaflet-interactive {
    cursor: pointer;
    pointer-events: auto;
}

@@ -638,3 +645,13 @@ svg.leaflet-image-layer.leaflet-interactive path {
    margin-left: -12px;
    border-right-color: #fff;
}

/* Printing */

@media print {
    /* Prevent printers from removing background-images of controls. */
    .leaflet-control {
        -webkit-print-color-adjust: exact;
        color-adjust: exact;
    }
}
File diff suppressed because one or more lines are too long
@@ -279,6 +279,30 @@ input[type=checkbox]:not(:checked) + .label_hide_if_checked + .label_hide_if_not
.result-map {
    clear: both;
}
.result-map .img-thumbnail {
    float: right;
    width: auto;
    height: 120px;
    border: 0;
    background: inherit;
}
.result-map .img-type {
    width: 20px;
    max-height: 20px;
}
.result-map .result-map-details {
    font-size: 13px;
    border-collapse: separate;
    border-spacing: 0 0.35rem;
}
.result-map .result-map-details th {
    font-weight: inherit;
    width: 20rem;
    vertical-align: top;
}
.result-map .result-map-details td {
    vertical-align: top;
}
.result-code {
    clear: both;
}

@@ -405,6 +429,19 @@ input[type=checkbox]:not(:checked) + .label_hide_if_checked + .label_hide_if_not
    flex-flow: row wrap;
    align-content: stretch;
}
.search_categories a,
#categories a {
    height: 3rem;
    flex-grow: 1;
    flex-basis: auto;
    border: #DDD 1px solid;
    border-right: none;
    color: #666;
    padding-bottom: 0.3rem;
    padding-top: 0.1rem;
    text-align: center;
    min-width: 50px;
}
.search_categories label,
#categories label,
.search_categories .input-group-addon,

@@ -414,22 +451,23 @@ input[type=checkbox]:not(:checked) + .label_hide_if_checked + .label_hide_if_not
    font-size: 1.2rem;
    font-weight: normal;
    background-color: white;
    border: #DDD 1px solid;
    border: none;
    border-right: none;
    color: #666;
    padding-bottom: 0.4rem;
    padding-top: 0.4rem;
    padding-bottom: 0;
    padding-top: 0;
    text-align: center;
    min-width: 50px;
    width: 100%;
    height: 100%;
}
.search_categories label:last-child,
#categories label:last-child,
.search_categories a:last-child,
#categories a:last-child,
.search_categories .input-group-addon:last-child,
#categories .input-group-addon:last-child {
    border-right: #DDD 1px solid;
}
.search_categories input[type="checkbox"]:checked + label,
#categories input[type="checkbox"]:checked + label {
.search_categories input[type="checkbox"]:checked + a,
#categories input[type="checkbox"]:checked + a {
    color: #29314D;
    font-weight: bold;
    border-bottom: #01D7D4 5px solid;
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long

@@ -252,6 +252,30 @@ input[type=checkbox]:not(:checked) + .label_hide_if_checked + .label_hide_if_not
.result-map {
    clear: both;
}
.result-map .img-thumbnail {
    float: right;
    width: auto;
    height: 120px;
    border: 0;
    background: inherit;
}
.result-map .img-type {
    width: 20px;
    max-height: 20px;
}
.result-map .result-map-details {
    font-size: 13px;
    border-collapse: separate;
    border-spacing: 0 0.35rem;
}
.result-map .result-map-details th {
    font-weight: inherit;
    width: 20rem;
    vertical-align: top;
}
.result-map .result-map-details td {
    vertical-align: top;
}
.result-code {
    clear: both;
}

@@ -378,6 +402,19 @@ input[type=checkbox]:not(:checked) + .label_hide_if_checked + .label_hide_if_not
    flex-flow: row wrap;
    align-content: stretch;
}
.search_categories a,
#categories a {
    height: 3rem;
    flex-grow: 1;
    flex-basis: auto;
    border: #DDD 1px solid;
    border-right: none;
    color: #666;
    padding-bottom: 0.3rem;
    padding-top: 0.1rem;
    text-align: center;
    min-width: 50px;
}
.search_categories label,
#categories label,
.search_categories .input-group-addon,

@@ -387,22 +424,23 @@ input[type=checkbox]:not(:checked) + .label_hide_if_checked + .label_hide_if_not
    font-size: 1.2rem;
    font-weight: normal;
    background-color: white;
    border: #DDD 1px solid;
    border: none;
    border-right: none;
    color: #666;
    padding-bottom: 0.4rem;
    padding-top: 0.4rem;
    padding-bottom: 0;
    padding-top: 0;
    text-align: center;
    min-width: 50px;
    width: 100%;
    height: 100%;
}
.search_categories label:last-child,
#categories label:last-child,
.search_categories a:last-child,
#categories a:last-child,
.search_categories .input-group-addon:last-child,
#categories .input-group-addon:last-child {
    border-right: #DDD 1px solid;
}
.search_categories input[type="checkbox"]:checked + label,
#categories input[type="checkbox"]:checked + label {
.search_categories input[type="checkbox"]:checked + a,
#categories input[type="checkbox"]:checked + a {
    color: #29314D;
    font-weight: bold;
    border-bottom: #01D7D4 5px solid;

@@ -947,7 +985,7 @@ ul.nav li a {
    background: #1d1f21 none !important;
    color: #D5D8D7 !important;
}
#categories * {
#categories a {
    border: 1px solid #3d3f43 !important;
}
#categories *:checked + label {
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -215,6 +215,18 @@ input[type=checkbox]:not(:checked) + .label_hide_if_checked + .label_hide_if_not
    flex-wrap: wrap;
    align-content: stretch;
}
.search_categories a,
#categories a {
    flex-grow: 1;
    flex-basis: auto;
    border: #DDD 1px solid;
    border-right: none;
    color: #666;
    padding-bottom: 0.4rem;
    padding-top: 0.4rem;
    text-align: center;
    min-width: 50px;
}
.search_categories label,
#categories label,
.search_categories .input-group-addon,

@@ -230,10 +242,11 @@ input[type=checkbox]:not(:checked) + .label_hide_if_checked + .label_hide_if_not
    padding-bottom: 0.8rem;
    padding-top: 0.8rem;
    text-align: center;
    min-width: 50px;
    width: 100%;
    height: 100%;
}
.search_categories label:last-child,
#categories label:last-child,
.search_categories a:last-child,
#categories a:last-child,
.search_categories .input-group-addon:last-child,
#categories .input-group-addon:last-child {
    border-right: #DDD 1px solid;
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -51,7 +51,7 @@ ul.nav li a {
    color:#D5D8D7 !important;
}

#categories *, {
#categories a, {
    border: 1px solid #3d3f43 !important;
}


@@ -6,25 +6,40 @@
    flex-flow: row wrap;
    align-content: stretch;

    a {
        height: 3rem;
        flex-grow: 1;
        flex-basis: auto;
        border: @mild-gray 1px solid;
        border-right: none;
        color: @dark-gray;
        padding-bottom: 0.3rem;
        padding-top: 0.1rem;
        text-align: center;
        min-width: 50px;
    }

    label, .input-group-addon {
        flex-grow: 1;
        flex-basis: auto;
        font-size: 1.2rem;
        font-weight: normal;
        background-color: white;
        border: @mild-gray 1px solid;
        border: none;
        border-right: none;
        color: @dark-gray;
        padding-bottom: 0.4rem;
        padding-top: 0.4rem;
        padding-bottom: 0;
        padding-top: 0;
        text-align: center;
        min-width: 50px;
        width: 100%;
        height: 100%;
    }
    label:last-child, .input-group-addon:last-child {

    a:last-child, .input-group-addon:last-child {
        border-right: @mild-gray 1px solid;
    }

    input[type="checkbox"]:checked + label {
    input[type="checkbox"]:checked + a {
        color: @black;
        font-weight: bold;
        border-bottom: @light-green 5px solid;
@@ -1,3 +1,5 @@
 @import "variables.less";
+
 @import "footer.less";
+
 @import "checkbox.less";
@@ -6,24 +6,37 @@
    flex-wrap: wrap;
    align-content: stretch;
 
-    label, .input-group-addon {
-        flex-grow: 1;
-        flex-basis: auto;
-        font-size: 1.3rem;
-        font-weight: normal;
-        background-color: white;
-        border: #DDD 1px solid;
-        border-right: none;
-        color: #333;
-        padding-bottom: 0.8rem;
-        padding-top: 0.8rem;
-        text-align: center;
-        min-width: 50px;
-    }
+    a {
+        flex-grow: 1;
+        flex-basis: auto;
+        border: @mild-gray 1px solid;
+        border-right: none;
+        color: @dark-gray;
+        padding-bottom: 0.4rem;
+        padding-top: 0.4rem;
+        text-align: center;
+        min-width: 50px;
+    }
 
-    label:last-child, .input-group-addon:last-child {
-        border-right: #DDD 1px solid;
-    }
+    label, .input-group-addon {
+        flex-grow: 1;
+        flex-basis: auto;
+        font-size: 1.3rem;
+        font-weight: normal;
+        background-color: white;
+        border: #DDD 1px solid;
+        border-right: none;
+        color: #333;
+        padding-bottom: 0.8rem;
+        padding-top: 0.8rem;
+        text-align: center;
+        width: 100%;
+        height: 100%;
+    }
+
+    a:last-child, .input-group-addon:last-child {
+        border-right: #DDD 1px solid;
+    }
 
    input[type="checkbox"]:checked + label{
        color: black;
@@ -1,4 +1,4 @@
-/*! searx | 23-03-2021 | */
+/*! searx | 25-07-2022 | https://github.com/searx/searx */
 /*
  * searx, A privacy-respecting, hackable metasearch engine
  *
@@ -1902,6 +1902,30 @@ article.result-images[data-vim-selected]::before {
   background-color: rgba(0, 0, 0, 0.6);
   font-size: 0.7em;
 }
+.result-map img.image {
+  float: right !important;
+  height: 100px !important;
+  width: auto !important;
+}
+.result-map table {
+  font-size: .9em;
+  width: auto;
+  border-collapse: separate;
+  border-spacing: 0 0.35rem;
+}
+.result-map table th {
+  font-weight: inherit;
+  width: 17rem;
+  vertical-align: top;
+  text-align: left;
+}
+.result-map table td {
+  vertical-align: top;
+  text-align: left;
+}
+.hidden {
+  display: none !important;
+}
 .torrent_result {
   border-left: 10px solid lightgray;
   padding-left: 3px;
File diff suppressed because one or more lines are too long
@@ -1,4 +1,4 @@
-/*! searx | 23-03-2021 | */
+/*! searx | 25-07-2022 | https://github.com/searx/searx */
 /*
  * searx, A privacy-respecting, hackable metasearch engine
  *
@@ -1902,6 +1902,30 @@ article.result-images[data-vim-selected]::before {
   background-color: rgba(0, 0, 0, 0.6);
   font-size: 0.7em;
 }
+.result-map img.image {
+  float: right !important;
+  height: 100px !important;
+  width: auto !important;
+}
+.result-map table {
+  font-size: .9em;
+  width: auto;
+  border-collapse: separate;
+  border-spacing: 0 0.35rem;
+}
+.result-map table th {
+  font-weight: inherit;
+  width: 17rem;
+  vertical-align: top;
+  text-align: left;
+}
+.result-map table td {
+  vertical-align: top;
+  text-align: left;
+}
+.hidden {
+  display: none !important;
+}
 .torrent_result {
   border-left: 10px solid lightgray;
   padding-left: 3px;
File diff suppressed because one or more lines are too long
@@ -1,4 +1,4 @@
-/*! simple/searx.min.js | 23-03-2021 | */
+/*! simple/searx.min.js | 25-07-2022 | https://github.com/searx/searx */
 
 (function(t,e){"use strict";var a=e.currentScript||function(){var t=e.getElementsByTagName("script");return t[t.length-1]}();t.searx={touch:"ontouchstart"in t||t.DocumentTouch&&document instanceof DocumentTouch||false,method:a.getAttribute("data-method"),autocompleter:a.getAttribute("data-autocompleter")==="true",search_on_category_select:a.getAttribute("data-search-on-category-select")==="true",infinite_scroll:a.getAttribute("data-infinite-scroll")==="true",static_path:a.getAttribute("data-static-path"),translations:JSON.parse(a.getAttribute("data-translations"))};e.getElementsByTagName("html")[0].className=t.searx.touch?"js touch":"js"})(window,document);
 //# sourceMappingURL=searx.head.min.js.map
@@ -1235,7 +1235,7 @@ module.exports = AutoComplete;
 (function(w, d, searx) {
   'use strict';
 
-  var firstFocus = true, qinput_id = "q", qinput;
+  var firstFocus = true, qinput_id = "q.autofocus", qinput;
 
   function placeCursorAtEnd(element) {
     if (element.setSelectionRange) {
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -17,7 +17,7 @@
 (function(w, d, searx) {
   'use strict';
 
-  var firstFocus = true, qinput_id = "q", qinput;
+  var firstFocus = true, qinput_id = "q.autofocus", qinput;
 
   function placeCursorAtEnd(element) {
     if (element.setSelectionRange) {
@@ -1,13 +1,17 @@
 <div id="categories">
 {%- if rtl -%}
 {% for category in categories | reverse -%}
+<a>
 <input class="hidden" type="checkbox" id="checkbox_{{ category|replace(' ', '_') }}" name="category_{{ category }}" {% if category in selected_categories %}checked="checked"{% endif %} />{{- '' -}}
 <label for="checkbox_{{ category|replace(' ', '_') }}">{{ _(category) }}</label>
+</a>
 {%- endfor %}
 {%- else -%}
 {% for category in categories -%}
+<a>
 <input class="hidden" type="checkbox" id="checkbox_{{ category|replace(' ', '_') }}" name="category_{{ category }}" {% if category in selected_categories %}checked="checked"{% endif %} />{{- '' -}}
 <label for="checkbox_{{ category|replace(' ', '_') }}">{{ _(category) }}</label>
+</a>
 {%- endfor %}
 {%- endif -%}
 </div>
@@ -36,6 +36,9 @@
 <small>{{ result_link(result.cached_url, icon('link') + _('cached'), "text-info", id) }}</small>
 {%- elif not result.is_onion -%}
 <small>{{ result_link("https://web.archive.org/web/" + result.url, icon('link') + _('cached'), "text-info", id) }}</small>
+{% if archive_today %}
+<small>{{ result_link("https://archive.today/" + result.url, icon('link') + _('archive'), "text-info", id) }}</small>
+{% endif %}
 {%- endif -%}
 {%- endif -%}
 {%- if proxify -%}
@@ -72,6 +75,9 @@
 <small>{{ result_link(result.cached_url, icon('link') + _('cached'), "text-info", id) }}</small>
 {%- elif not result.is_onion -%}
 <small>{{ result_link("https://web.archive.org/web/" + result.url, icon('link') + _('cached'), "text-info", id) }}</small>
+{% if archive_today %}
+<small>{{ result_link("https://archive.today/" + result.url, icon('link') + _('archive'), "text-info", id) }}</small>
+{% endif %}
 {%- endif -%}
 {%- endif -%}
 {%- if proxify -%}
@@ -150,6 +150,28 @@
 {{ preferences_item_footer(info, label, rtl) }}
 {% endif %}
 
+{% if 'autofocus' not in locked_preferences %}
+{% set label = _('Autofocus search field') %}
+{% set info = _('Turn off if you use your keyboard to scroll') %}
+{{ preferences_item_header(info, label, rtl, 'autofocus') }}
+<select class="form-control {{ custom_select_class(rtl) }}" name="autofocus" id="autofocus">
+    <option value="1" {% if autofocus %}selected="selected"{% endif %}>{{ _('On') }}</option>
+    <option value="0" {% if not autofocus %}selected="selected"{% endif %}>{{ _('Off')}}</option>
+</select>
+{{ preferences_item_footer(info, label, rtl) }}
+{% endif %}
+
+{% if 'archive_today' not in locked_preferences %}
+{% set label = _('Show archive.today links') %}
+{% set info = _('Alternative link cache service') %}
+{{ preferences_item_header(info, label, rtl, 'archive_today') }}
+<select class="form-control {{ custom_select_class(rtl) }}" name="archive_today" id="archive_today">
+    <option value="1" {% if archive_today %}selected="selected"{% endif %}>{{ _('On') }}</option>
+    <option value="0" {% if not archive_today %}selected="selected"{% endif %}>{{ _('Off')}}</option>
+</select>
+{{ preferences_item_footer(info, label, rtl) }}
+{% endif %}
+
 {% set label = _('Show advanced settings') %}
 {% set info = _('Show advanced settings panel in the home page by default') %}
 {{ preferences_item_header(info, label, rtl, 'advanced_search') }}
@@ -3,7 +3,7 @@
 <div class="row">
     <div class="col-xs-12 col-md-8">
         <div class="input-group search-margin">
-            <input type="search" autofocus name="q" class="form-control" id="q" placeholder="{{ _('Search for...') }}" aria-label="{{ _('Search for...') }}" autocomplete="off" value="{{ q }}" accesskey="s">
+            <input type="search" {% if autofocus %}autofocus{% endif %} name="q" class="form-control" id="q" placeholder="{{ _('Search for...') }}" aria-label="{{ _('Search for...') }}" autocomplete="off" value="{{ q }}" accesskey="s">
             <span class="input-group-btn">
                 <button type="submit" class="btn btn-default" aria-label="{{ _('Start search') }}"><span class="hide_if_nojs">{{ icon('search') }}</span><span class="hidden active_if_nojs">{{ _('Start search') }}</span></button>
                 <button type="button" id="clear_search" class="btn btn-default hide_if_nojs" aria-label="{{ _('Clear search') }}">{{ icon('remove') }}</button>
@@ -1,7 +1,7 @@
 <div id="categories">{{- '' -}}
     <div id="categories_container">
         {%- for category in categories -%}
-            <div class="category"><input type="checkbox" id="checkbox_{{ category|replace(' ', '_') }}" name="category_{{ category }}"{% if category in selected_categories %} checked="checked"{% endif %}/><label for="checkbox_{{ category|replace(' ', '_') }}" class="tooltips">{{ _(category) }}</label></div>
+            <div class="category"><a><input type="checkbox" id="checkbox_{{ category|replace(' ', '_') }}" name="category_{{ category }}"{% if category in selected_categories %} checked="checked"{% endif %}/><label for="checkbox_{{ category|replace(' ', '_') }}" class="tooltips">{{ _(category) }}</label></a></div>
         {%- endfor -%}
         {%- if display_tooltip %}<div class="help">{{ _('Click on the magnifier to perform search') }}</div>{% endif -%}
     </div>{{- '' -}}
Some files were not shown because too many files have changed in this diff