|
@@ -0,0 +1,7586 @@
|
|
|
|
+# HG changeset patch
|
|
|
|
+# User Thomas Wisniewski <twisniewski@mozilla.com>
|
|
|
|
+# Date 1644937638 0
|
|
|
|
+# Node ID 8db101a9793765d705c30577d95ea6b6ad99b28c
|
|
|
|
+# Parent 39b976c7b734a6a6706cb686b2faaffb2e35c7d8
|
|
|
|
+Bug 1715900 - Bump urllib3 to version 1.26.0, boto3 to 1.16.63, and botocore to 1.19.63; r=mhentges
|
|
|
|
+
|
|
|
|
+Differential Revision: https://phabricator.services.mozilla.com/D138383
|
|
|
|
+
|
|
|
|
+diff --git a/third_party/python/requirements.in b/third_party/python/requirements.in
|
|
|
|
+--- a/third_party/python/requirements.in
|
|
|
|
++++ b/third_party/python/requirements.in
|
|
|
|
+@@ -35,10 +35,11 @@ pyasn1==0.4.8
|
|
|
|
+ pytest==3.6.2
|
|
|
|
+ python-hglib==2.4
|
|
|
|
+ pytoml==0.1.10
|
|
|
|
+ pyyaml==5.4.1
|
|
|
|
+ redo==2.0.3
|
|
|
|
+ requests==2.25.1
|
|
|
|
+ responses==0.10.6
|
|
|
|
+ six==1.13.0
|
|
|
|
++urllib3==1.26
|
|
|
|
+ voluptuous==0.11.5
|
|
|
|
+ yamllint==1.23
|
|
|
|
+diff --git a/third_party/python/requirements.txt b/third_party/python/requirements.txt
|
|
|
|
+--- a/third_party/python/requirements.txt
|
|
|
|
++++ b/third_party/python/requirements.txt
|
|
|
|
+@@ -197,20 +197,22 @@ six==1.13.0 \
|
|
|
|
+ # -r requirements-mach-vendor-python.in
|
|
|
|
+ # blessings
|
|
|
|
+ # compare-locales
|
|
|
|
+ # ecdsa
|
|
|
|
+ # fluent.migrate
|
|
|
|
+ # more-itertools
|
|
|
|
+ # pytest
|
|
|
|
+ # responses
|
|
|
|
+-urllib3==1.25.9 \
|
|
|
|
+- --hash=sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527 \
|
|
|
|
+- --hash=sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115
|
|
|
|
+- # via requests
|
|
|
|
++urllib3==1.26.0 \
|
|
|
|
++ --hash=sha256:4849f132941d68144df0a3785ccc4fe423430ba5db0108d045c8cadbc90f517a \
|
|
|
|
++ --hash=sha256:bad31cb622ceee0ab46c4c884cf61957def0ff2e644de0a7a093678844c9ccac
|
|
|
|
++ # via
|
|
|
|
++ # -r requirements-mach-vendor-python.in
|
|
|
|
++ # requests
|
|
|
|
+ voluptuous==0.11.5 \
|
|
|
|
+ --hash=sha256:303542b3fc07fb52ec3d7a1c614b329cdbee13a9d681935353d8ea56a7bfa9f1 \
|
|
|
|
+ --hash=sha256:567a56286ef82a9d7ae0628c5842f65f516abcb496e74f3f59f1d7b28df314ef
|
|
|
|
+ # via -r requirements-mach-vendor-python.in
|
|
|
|
+ yamllint==1.23 \
|
|
|
|
+ --hash=sha256:0fa69bf8a86182b7fe14918bdd3a30354c869966bbc7cbfff176af71bda9c806 \
|
|
|
|
+ --hash=sha256:59f3ff77f44e7f46be6aecdb985830f73a1c51e290b7082a7d38c2ae1940f4a9
|
|
|
|
+ # via -r requirements-mach-vendor-python.in
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3-1.25.9.dist-info/LICENSE.txt b/third_party/python/urllib3/urllib3-1.25.9.dist-info/LICENSE.txt
|
|
|
|
+deleted file mode 100644
|
|
|
|
+--- a/third_party/python/urllib3/urllib3-1.25.9.dist-info/LICENSE.txt
|
|
|
|
++++ /dev/null
|
|
|
|
+@@ -1,21 +0,0 @@
|
|
|
|
+-MIT License
|
|
|
|
+-
|
|
|
|
+-Copyright (c) 2008-2019 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
|
|
|
|
+-
|
|
|
|
+-Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
|
|
+-of this software and associated documentation files (the "Software"), to deal
|
|
|
|
+-in the Software without restriction, including without limitation the rights
|
|
|
|
+-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
|
|
+-copies of the Software, and to permit persons to whom the Software is
|
|
|
|
+-furnished to do so, subject to the following conditions:
|
|
|
|
+-
|
|
|
|
+-The above copyright notice and this permission notice shall be included in all
|
|
|
|
+-copies or substantial portions of the Software.
|
|
|
|
+-
|
|
|
|
+-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
|
|
+-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
|
|
+-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
|
|
+-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
|
|
+-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
|
|
+-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
|
|
+-SOFTWARE.
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3-1.25.9.dist-info/METADATA b/third_party/python/urllib3/urllib3-1.25.9.dist-info/METADATA
|
|
|
|
+deleted file mode 100644
|
|
|
|
+--- a/third_party/python/urllib3/urllib3-1.25.9.dist-info/METADATA
|
|
|
|
++++ /dev/null
|
|
|
|
+@@ -1,1262 +0,0 @@
|
|
|
|
+-Metadata-Version: 2.1
|
|
|
|
+-Name: urllib3
|
|
|
|
+-Version: 1.25.9
|
|
|
|
+-Summary: HTTP library with thread-safe connection pooling, file post, and more.
|
|
|
|
+-Home-page: https://urllib3.readthedocs.io/
|
|
|
|
+-Author: Andrey Petrov
|
|
|
|
+-Author-email: andrey.petrov@shazow.net
|
|
|
|
+-License: MIT
|
|
|
|
+-Project-URL: Documentation, https://urllib3.readthedocs.io/
|
|
|
|
+-Project-URL: Code, https://github.com/urllib3/urllib3
|
|
|
|
+-Project-URL: Issue tracker, https://github.com/urllib3/urllib3/issues
|
|
|
|
+-Keywords: urllib httplib threadsafe filepost http https ssl pooling
|
|
|
|
+-Platform: UNKNOWN
|
|
|
|
+-Classifier: Environment :: Web Environment
|
|
|
|
+-Classifier: Intended Audience :: Developers
|
|
|
|
+-Classifier: License :: OSI Approved :: MIT License
|
|
|
|
+-Classifier: Operating System :: OS Independent
|
|
|
|
+-Classifier: Programming Language :: Python
|
|
|
|
+-Classifier: Programming Language :: Python :: 2
|
|
|
|
+-Classifier: Programming Language :: Python :: 2.7
|
|
|
|
+-Classifier: Programming Language :: Python :: 3
|
|
|
|
+-Classifier: Programming Language :: Python :: 3.5
|
|
|
|
+-Classifier: Programming Language :: Python :: 3.6
|
|
|
|
+-Classifier: Programming Language :: Python :: 3.7
|
|
|
|
+-Classifier: Programming Language :: Python :: 3.8
|
|
|
|
+-Classifier: Programming Language :: Python :: 3.9
|
|
|
|
+-Classifier: Programming Language :: Python :: Implementation :: CPython
|
|
|
|
+-Classifier: Programming Language :: Python :: Implementation :: PyPy
|
|
|
|
+-Classifier: Topic :: Internet :: WWW/HTTP
|
|
|
|
+-Classifier: Topic :: Software Development :: Libraries
|
|
|
|
+-Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4
|
|
|
|
+-Provides-Extra: brotli
|
|
|
|
+-Requires-Dist: brotlipy (>=0.6.0) ; extra == 'brotli'
|
|
|
|
+-Provides-Extra: secure
|
|
|
|
+-Requires-Dist: certifi ; extra == 'secure'
|
|
|
|
+-Requires-Dist: cryptography (>=1.3.4) ; extra == 'secure'
|
|
|
|
+-Requires-Dist: idna (>=2.0.0) ; extra == 'secure'
|
|
|
|
+-Requires-Dist: pyOpenSSL (>=0.14) ; extra == 'secure'
|
|
|
|
+-Requires-Dist: ipaddress ; (python_version == "2.7") and extra == 'secure'
|
|
|
|
+-Provides-Extra: socks
|
|
|
|
+-Requires-Dist: PySocks (!=1.5.7,<2.0,>=1.5.6) ; extra == 'socks'
|
|
|
|
+-
|
|
|
|
+-urllib3
|
|
|
|
+-=======
|
|
|
|
+-
|
|
|
|
+-urllib3 is a powerful, *sanity-friendly* HTTP client for Python. Much of the
|
|
|
|
+-Python ecosystem already uses urllib3 and you should too.
|
|
|
|
+-urllib3 brings many critical features that are missing from the Python
|
|
|
|
+-standard libraries:
|
|
|
|
+-
|
|
|
|
+-- Thread safety.
|
|
|
|
+-- Connection pooling.
|
|
|
|
+-- Client-side SSL/TLS verification.
|
|
|
|
+-- File uploads with multipart encoding.
|
|
|
|
+-- Helpers for retrying requests and dealing with HTTP redirects.
|
|
|
|
+-- Support for gzip, deflate, and brotli encoding.
|
|
|
|
+-- Proxy support for HTTP and SOCKS.
|
|
|
|
+-- 100% test coverage.
|
|
|
|
+-
|
|
|
|
+-urllib3 is powerful and easy to use::
|
|
|
|
+-
|
|
|
|
+- >>> import urllib3
|
|
|
|
+- >>> http = urllib3.PoolManager()
|
|
|
|
+- >>> r = http.request('GET', 'http://httpbin.org/robots.txt')
|
|
|
|
+- >>> r.status
|
|
|
|
+- 200
|
|
|
|
+- >>> r.data
|
|
|
|
+- 'User-agent: *\nDisallow: /deny\n'
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-Installing
|
|
|
|
+-----------
|
|
|
|
+-
|
|
|
|
+-urllib3 can be installed with `pip <https://pip.pypa.io>`_::
|
|
|
|
+-
|
|
|
|
+- $ pip install urllib3
|
|
|
|
+-
|
|
|
|
+-Alternatively, you can grab the latest source code from `GitHub <https://github.com/urllib3/urllib3>`_::
|
|
|
|
+-
|
|
|
|
+- $ git clone git://github.com/urllib3/urllib3.git
|
|
|
|
+- $ python setup.py install
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-Documentation
|
|
|
|
+--------------
|
|
|
|
+-
|
|
|
|
+-urllib3 has usage and reference documentation at `urllib3.readthedocs.io <https://urllib3.readthedocs.io>`_.
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-Contributing
|
|
|
|
+-------------
|
|
|
|
+-
|
|
|
|
+-urllib3 happily accepts contributions. Please see our
|
|
|
|
+-`contributing documentation <https://urllib3.readthedocs.io/en/latest/contributing.html>`_
|
|
|
|
+-for some tips on getting started.
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-Security Disclosures
|
|
|
|
+---------------------
|
|
|
|
+-
|
|
|
|
+-To report a security vulnerability, please use the
|
|
|
|
+-`Tidelift security contact <https://tidelift.com/security>`_.
|
|
|
|
+-Tidelift will coordinate the fix and disclosure with maintainers.
|
|
|
|
+-
|
|
|
|
+-Maintainers
|
|
|
|
+------------
|
|
|
|
+-
|
|
|
|
+-- `@sethmlarson <https://github.com/sethmlarson>`_ (Seth M. Larson)
|
|
|
|
+-- `@pquentin <https://github.com/pquentin>`_ (Quentin Pradet)
|
|
|
|
+-- `@theacodes <https://github.com/theacodes>`_ (Thea Flowers)
|
|
|
|
+-- `@haikuginger <https://github.com/haikuginger>`_ (Jess Shapiro)
|
|
|
|
+-- `@lukasa <https://github.com/lukasa>`_ (Cory Benfield)
|
|
|
|
+-- `@sigmavirus24 <https://github.com/sigmavirus24>`_ (Ian Stapleton Cordasco)
|
|
|
|
+-- `@shazow <https://github.com/shazow>`_ (Andrey Petrov)
|
|
|
|
+-
|
|
|
|
+-👋
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-Sponsorship
|
|
|
|
+------------
|
|
|
|
+-
|
|
|
|
+-.. |tideliftlogo| image:: https://nedbatchelder.com/pix/Tidelift_Logos_RGB_Tidelift_Shorthand_On-White_small.png
|
|
|
|
+- :width: 75
|
|
|
|
+- :alt: Tidelift
|
|
|
|
+-
|
|
|
|
+-.. list-table::
|
|
|
|
+- :widths: 10 100
|
|
|
|
+-
|
|
|
|
+- * - |tideliftlogo|
|
|
|
|
+- - Professional support for urllib3 is available as part of the `Tidelift
|
|
|
|
+- Subscription`_. Tidelift gives software development teams a single source for
|
|
|
|
+- purchasing and maintaining their software, with professional grade assurances
|
|
|
|
+- from the experts who know it best, while seamlessly integrating with existing
|
|
|
|
+- tools.
|
|
|
|
+-
|
|
|
|
+-.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-urllib3?utm_source=pypi-urllib3&utm_medium=referral&utm_campaign=readme
|
|
|
|
+-
|
|
|
|
+-If your company benefits from this library, please consider `sponsoring its
|
|
|
|
+-development <https://urllib3.readthedocs.io/en/latest/contributing.html#sponsorship-project-grants>`_.
|
|
|
|
+-
|
|
|
|
+-Sponsors include:
|
|
|
|
+-
|
|
|
|
+-- Abbott (2018-2019), sponsored `@sethmlarson <https://github.com/sethmlarson>`_'s work on urllib3.
|
|
|
|
+-- Google Cloud Platform (2018-2019), sponsored `@theacodes <https://github.com/theacodes>`_'s work on urllib3.
|
|
|
|
+-- Akamai (2017-2018), sponsored `@haikuginger <https://github.com/haikuginger>`_'s work on urllib3
|
|
|
|
+-- Hewlett Packard Enterprise (2016-2017), sponsored `@Lukasa’s <https://github.com/Lukasa>`_ work on urllib3.
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-Changes
|
|
|
|
+-=======
|
|
|
|
+-
|
|
|
|
+-1.25.9 (2020-04-16)
|
|
|
|
+--------------------
|
|
|
|
+-
|
|
|
|
+-* Added ``InvalidProxyConfigurationWarning`` which is raised when
|
|
|
|
+- erroneously specifying an HTTPS proxy URL. urllib3 doesn't currently
|
|
|
|
+- support connecting to HTTPS proxies but will soon be able to
|
|
|
|
+- and we would like users to migrate properly without much breakage.
|
|
|
|
+-
|
|
|
|
+- See `this GitHub issue <https://github.com/urllib3/urllib3/issues/1850>`_
|
|
|
|
+- for more information on how to fix your proxy config. (Pull #1851)
|
|
|
|
+-
|
|
|
|
+-* Drain connection after ``PoolManager`` redirect (Pull #1817)
|
|
|
|
+-
|
|
|
|
+-* Ensure ``load_verify_locations`` raises ``SSLError`` for all backends (Pull #1812)
|
|
|
|
+-
|
|
|
|
+-* Rename ``VerifiedHTTPSConnection`` to ``HTTPSConnection`` (Pull #1805)
|
|
|
|
+-
|
|
|
|
+-* Allow the CA certificate data to be passed as a string (Pull #1804)
|
|
|
|
+-
|
|
|
|
+-* Raise ``ValueError`` if method contains control characters (Pull #1800)
|
|
|
|
+-
|
|
|
|
+-* Add ``__repr__`` to ``Timeout`` (Pull #1795)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.25.8 (2020-01-20)
|
|
|
|
+--------------------
|
|
|
|
+-
|
|
|
|
+-* Drop support for EOL Python 3.4 (Pull #1774)
|
|
|
|
+-
|
|
|
|
+-* Optimize _encode_invalid_chars (Pull #1787)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.25.7 (2019-11-11)
|
|
|
|
+--------------------
|
|
|
|
+-
|
|
|
|
+-* Preserve ``chunked`` parameter on retries (Pull #1715, Pull #1734)
|
|
|
|
+-
|
|
|
|
+-* Allow unset ``SERVER_SOFTWARE`` in App Engine (Pull #1704, Issue #1470)
|
|
|
|
+-
|
|
|
|
+-* Fix issue where URL fragment was sent within the request target. (Pull #1732)
|
|
|
|
+-
|
|
|
|
+-* Fix issue where an empty query section in a URL would fail to parse. (Pull #1732)
|
|
|
|
+-
|
|
|
|
+-* Remove TLS 1.3 support in SecureTransport due to Apple removing support (Pull #1703)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.25.6 (2019-09-24)
|
|
|
|
+--------------------
|
|
|
|
+-
|
|
|
|
+-* Fix issue where tilde (``~``) characters were incorrectly
|
|
|
|
+- percent-encoded in the path. (Pull #1692)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.25.5 (2019-09-19)
|
|
|
|
+--------------------
|
|
|
|
+-
|
|
|
|
+-* Add mitigation for BPO-37428 affecting Python <3.7.4 and OpenSSL 1.1.1+ which
|
|
|
|
+- caused certificate verification to be enabled when using ``cert_reqs=CERT_NONE``.
|
|
|
|
+- (Issue #1682)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.25.4 (2019-09-19)
|
|
|
|
+--------------------
|
|
|
|
+-
|
|
|
|
+-* Propagate Retry-After header settings to subsequent retries. (Pull #1607)
|
|
|
|
+-
|
|
|
|
+-* Fix edge case where Retry-After header was still respected even when
|
|
|
|
+- explicitly opted out of. (Pull #1607)
|
|
|
|
+-
|
|
|
|
+-* Remove dependency on ``rfc3986`` for URL parsing.
|
|
|
|
+-
|
|
|
|
+-* Fix issue where URLs containing invalid characters within ``Url.auth`` would
|
|
|
|
+- raise an exception instead of percent-encoding those characters.
|
|
|
|
+-
|
|
|
|
+-* Add support for ``HTTPResponse.auto_close = False`` which makes HTTP responses
|
|
|
|
+- work well with BufferedReaders and other ``io`` module features. (Pull #1652)
|
|
|
|
+-
|
|
|
|
+-* Percent-encode invalid characters in URL for ``HTTPConnectionPool.request()`` (Pull #1673)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.25.3 (2019-05-23)
|
|
|
|
+--------------------
|
|
|
|
+-
|
|
|
|
+-* Change ``HTTPSConnection`` to load system CA certificates
|
|
|
|
+- when ``ca_certs``, ``ca_cert_dir``, and ``ssl_context`` are
|
|
|
|
+- unspecified. (Pull #1608, Issue #1603)
|
|
|
|
+-
|
|
|
|
+-* Upgrade bundled rfc3986 to v1.3.2. (Pull #1609, Issue #1605)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.25.2 (2019-04-28)
|
|
|
|
+--------------------
|
|
|
|
+-
|
|
|
|
+-* Change ``is_ipaddress`` to not detect IPvFuture addresses. (Pull #1583)
|
|
|
|
+-
|
|
|
|
+-* Change ``parse_url`` to percent-encode invalid characters within the
|
|
|
|
+- path, query, and target components. (Pull #1586)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.25.1 (2019-04-24)
|
|
|
|
+--------------------
|
|
|
|
+-
|
|
|
|
+-* Add support for Google's ``Brotli`` package. (Pull #1572, Pull #1579)
|
|
|
|
+-
|
|
|
|
+-* Upgrade bundled rfc3986 to v1.3.1 (Pull #1578)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.25 (2019-04-22)
|
|
|
|
+------------------
|
|
|
|
+-
|
|
|
|
+-* Require and validate certificates by default when using HTTPS (Pull #1507)
|
|
|
|
+-
|
|
|
|
+-* Upgraded ``urllib3.utils.parse_url()`` to be RFC 3986 compliant. (Pull #1487)
|
|
|
|
+-
|
|
|
|
+-* Added support for ``key_password`` for ``HTTPSConnectionPool`` to use
|
|
|
|
+- encrypted ``key_file`` without creating your own ``SSLContext`` object. (Pull #1489)
|
|
|
|
+-
|
|
|
|
+-* Add TLSv1.3 support to CPython, pyOpenSSL, and SecureTransport ``SSLContext``
|
|
|
|
+- implementations. (Pull #1496)
|
|
|
|
+-
|
|
|
|
+-* Switched the default multipart header encoder from RFC 2231 to HTML 5 working draft. (Issue #303, PR #1492)
|
|
|
|
+-
|
|
|
|
+-* Fixed issue where OpenSSL would block if an encrypted client private key was
|
|
|
|
+- given and no password was given. Instead an ``SSLError`` is raised. (Pull #1489)
|
|
|
|
+-
|
|
|
|
+-* Added support for Brotli content encoding. It is enabled automatically if
|
|
|
|
+- ``brotlipy`` package is installed which can be requested with
|
|
|
|
+- ``urllib3[brotli]`` extra. (Pull #1532)
|
|
|
|
+-
|
|
|
|
+-* Drop ciphers using DSS key exchange from default TLS cipher suites.
|
|
|
|
+- Improve default ciphers when using SecureTransport. (Pull #1496)
|
|
|
|
+-
|
|
|
|
+-* Implemented a more efficient ``HTTPResponse.__iter__()`` method. (Issue #1483)
|
|
|
|
+-
|
|
|
|
+-1.24.3 (2019-05-01)
|
|
|
|
+--------------------
|
|
|
|
+-
|
|
|
|
+-* Apply fix for CVE-2019-9740. (Pull #1591)
|
|
|
|
+-
|
|
|
|
+-1.24.2 (2019-04-17)
|
|
|
|
+--------------------
|
|
|
|
+-
|
|
|
|
+-* Don't load system certificates by default when any other ``ca_certs``, ``ca_certs_dir`` or
|
|
|
|
+- ``ssl_context`` parameters are specified.
|
|
|
|
+-
|
|
|
|
+-* Remove Authorization header regardless of case when redirecting to cross-site. (Issue #1510)
|
|
|
|
+-
|
|
|
|
+-* Add support for IPv6 addresses in subjectAltName section of certificates. (Issue #1269)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.24.1 (2018-11-02)
|
|
|
|
+--------------------
|
|
|
|
+-
|
|
|
|
+-* Remove quadratic behavior within ``GzipDecoder.decompress()`` (Issue #1467)
|
|
|
|
+-
|
|
|
|
+-* Restored functionality of ``ciphers`` parameter for ``create_urllib3_context()``. (Issue #1462)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.24 (2018-10-16)
|
|
|
|
+------------------
|
|
|
|
+-
|
|
|
|
+-* Allow key_server_hostname to be specified when initializing a PoolManager to allow custom SNI to be overridden. (Pull #1449)
|
|
|
|
+-
|
|
|
|
+-* Test against Python 3.7 on AppVeyor. (Pull #1453)
|
|
|
|
+-
|
|
|
|
+-* Early-out ipv6 checks when running on App Engine. (Pull #1450)
|
|
|
|
+-
|
|
|
|
+-* Change ambiguous description of backoff_factor (Pull #1436)
|
|
|
|
+-
|
|
|
|
+-* Add ability to handle multiple Content-Encodings (Issue #1441 and Pull #1442)
|
|
|
|
+-
|
|
|
|
+-* Skip DNS names that can't be idna-decoded when using pyOpenSSL (Issue #1405).
|
|
|
|
+-
|
|
|
|
+-* Add a server_hostname parameter to HTTPSConnection which allows for
|
|
|
|
+- overriding the SNI hostname sent in the handshake. (Pull #1397)
|
|
|
|
+-
|
|
|
|
+-* Drop support for EOL Python 2.6 (Pull #1429 and Pull #1430)
|
|
|
|
+-
|
|
|
|
+-* Fixed bug where responses with header Content-Type: message/* erroneously
|
|
|
|
+- raised HeaderParsingError, resulting in a warning being logged. (Pull #1439)
|
|
|
|
+-
|
|
|
|
+-* Move urllib3 to src/urllib3 (Pull #1409)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.23 (2018-06-04)
|
|
|
|
+------------------
|
|
|
|
+-
|
|
|
|
+-* Allow providing a list of headers to strip from requests when redirecting
|
|
|
|
+- to a different host. Defaults to the ``Authorization`` header. Different
|
|
|
|
+- headers can be set via ``Retry.remove_headers_on_redirect``. (Issue #1316)
|
|
|
|
+-
|
|
|
|
+-* Fix ``util.selectors._fileobj_to_fd`` to accept ``long`` (Issue #1247).
|
|
|
|
+-
|
|
|
|
+-* Dropped Python 3.3 support. (Pull #1242)
|
|
|
|
+-
|
|
|
|
+-* Put the connection back in the pool when calling stream() or read_chunked() on
|
|
|
|
+- a chunked HEAD response. (Issue #1234)
|
|
|
|
+-
|
|
|
|
+-* Fixed pyOpenSSL-specific ssl client authentication issue when clients
|
|
|
|
+- attempted to auth via certificate + chain (Issue #1060)
|
|
|
|
+-
|
|
|
|
+-* Add the port to the connectionpool connect print (Pull #1251)
|
|
|
|
+-
|
|
|
|
+-* Don't use the ``uuid`` module to create multipart data boundaries. (Pull #1380)
|
|
|
|
+-
|
|
|
|
+-* ``read_chunked()`` on a closed response returns no chunks. (Issue #1088)
|
|
|
|
+-
|
|
|
|
+-* Add Python 2.6 support to ``contrib.securetransport`` (Pull #1359)
|
|
|
|
+-
|
|
|
|
+-* Added support for auth info in url for SOCKS proxy (Pull #1363)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.22 (2017-07-20)
|
|
|
|
+------------------
|
|
|
|
+-
|
|
|
|
+-* Fixed missing brackets in ``HTTP CONNECT`` when connecting to IPv6 address via
|
|
|
|
+- IPv6 proxy. (Issue #1222)
|
|
|
|
+-
|
|
|
|
+-* Made the connection pool retry on ``SSLError``. The original ``SSLError``
|
|
|
|
+- is available on ``MaxRetryError.reason``. (Issue #1112)
|
|
|
|
+-
|
|
|
|
+-* Drain and release connection before recursing on retry/redirect. Fixes
|
|
|
|
+- deadlocks with a blocking connectionpool. (Issue #1167)
|
|
|
|
+-
|
|
|
|
+-* Fixed compatibility for cookiejar. (Issue #1229)
|
|
|
|
+-
|
|
|
|
+-* pyopenssl: Use vendored version of ``six``. (Issue #1231)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.21.1 (2017-05-02)
|
|
|
|
+--------------------
|
|
|
|
+-
|
|
|
|
+-* Fixed SecureTransport issue that would cause long delays in response body
|
|
|
|
+- delivery. (Pull #1154)
|
|
|
|
+-
|
|
|
|
+-* Fixed regression in 1.21 that threw exceptions when users passed the
|
|
|
|
+- ``socket_options`` flag to the ``PoolManager``. (Issue #1165)
|
|
|
|
+-
|
|
|
|
+-* Fixed regression in 1.21 that threw exceptions when users passed the
|
|
|
|
+- ``assert_hostname`` or ``assert_fingerprint`` flag to the ``PoolManager``.
|
|
|
|
+- (Pull #1157)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.21 (2017-04-25)
|
|
|
|
+------------------
|
|
|
|
+-
|
|
|
|
+-* Improved performance of certain selector system calls on Python 3.5 and
|
|
|
|
+- later. (Pull #1095)
|
|
|
|
+-
|
|
|
|
+-* Resolved issue where the PyOpenSSL backend would not wrap SysCallError
|
|
|
|
+- exceptions appropriately when sending data. (Pull #1125)
|
|
|
|
+-
|
|
|
|
+-* Selectors now detects a monkey-patched select module after import for modules
|
|
|
|
+- that patch the select module like eventlet, greenlet. (Pull #1128)
|
|
|
|
+-
|
|
|
|
+-* Reduced memory consumption when streaming zlib-compressed responses
|
|
|
|
+- (as opposed to raw deflate streams). (Pull #1129)
|
|
|
|
+-
|
|
|
|
+-* Connection pools now use the entire request context when constructing the
|
|
|
|
+- pool key. (Pull #1016)
|
|
|
|
+-
|
|
|
|
+-* ``PoolManager.connection_from_*`` methods now accept a new keyword argument,
|
|
|
|
+- ``pool_kwargs``, which are merged with the existing ``connection_pool_kw``.
|
|
|
|
+- (Pull #1016)
|
|
|
|
+-
|
|
|
|
+-* Add retry counter for ``status_forcelist``. (Issue #1147)
|
|
|
|
+-
|
|
|
|
+-* Added ``contrib`` module for using SecureTransport on macOS:
|
|
|
|
+- ``urllib3.contrib.securetransport``. (Pull #1122)
|
|
|
|
+-
|
|
|
|
+-* urllib3 now only normalizes the case of ``http://`` and ``https://`` schemes:
|
|
|
|
+- for schemes it does not recognise, it assumes they are case-sensitive and
|
|
|
|
+- leaves them unchanged.
|
|
|
|
+- (Issue #1080)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.20 (2017-01-19)
|
|
|
|
+------------------
|
|
|
|
+-
|
|
|
|
+-* Added support for waiting for I/O using selectors other than select,
|
|
|
|
+- improving urllib3's behaviour with large numbers of concurrent connections.
|
|
|
|
+- (Pull #1001)
|
|
|
|
+-
|
|
|
|
+-* Updated the date for the system clock check. (Issue #1005)
|
|
|
|
+-
|
|
|
|
+-* ConnectionPools now correctly consider hostnames to be case-insensitive.
|
|
|
|
+- (Issue #1032)
|
|
|
|
+-
|
|
|
|
+-* Outdated versions of PyOpenSSL now cause the PyOpenSSL contrib module
|
|
|
|
+- to fail when it is injected, rather than at first use. (Pull #1063)
|
|
|
|
+-
|
|
|
|
+-* Outdated versions of cryptography now cause the PyOpenSSL contrib module
|
|
|
|
+- to fail when it is injected, rather than at first use. (Issue #1044)
|
|
|
|
+-
|
|
|
|
+-* Automatically attempt to rewind a file-like body object when a request is
|
|
|
|
+- retried or redirected. (Pull #1039)
|
|
|
|
+-
|
|
|
|
+-* Fix some bugs that occur when modules incautiously patch the queue module.
|
|
|
|
+- (Pull #1061)
|
|
|
|
+-
|
|
|
|
+-* Prevent retries from occurring on read timeouts for which the request method
|
|
|
|
+- was not in the method whitelist. (Issue #1059)
|
|
|
|
+-
|
|
|
|
+-* Changed the PyOpenSSL contrib module to lazily load idna to avoid
|
|
|
|
+- unnecessarily bloating the memory of programs that don't need it. (Pull
|
|
|
|
+- #1076)
|
|
|
|
+-
|
|
|
|
+-* Add support for IPv6 literals with zone identifiers. (Pull #1013)
|
|
|
|
+-
|
|
|
|
+-* Added support for socks5h:// and socks4a:// schemes when working with SOCKS
|
|
|
|
+- proxies, and controlled remote DNS appropriately. (Issue #1035)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.19.1 (2016-11-16)
|
|
|
|
+--------------------
|
|
|
|
+-
|
|
|
|
+-* Fixed AppEngine import that didn't function on Python 3.5. (Pull #1025)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.19 (2016-11-03)
|
|
|
|
+------------------
|
|
|
|
+-
|
|
|
|
+-* urllib3 now respects Retry-After headers on 413, 429, and 503 responses when
|
|
|
|
+- using the default retry logic. (Pull #955)
|
|
|
|
+-
|
|
|
|
+-* Remove markers from setup.py to assist ancient setuptools versions. (Issue
|
|
|
|
+- #986)
|
|
|
|
+-
|
|
|
|
+-* Disallow superscripts and other integerish things in URL ports. (Issue #989)
|
|
|
|
+-
|
|
|
|
+-* Allow urllib3's HTTPResponse.stream() method to continue to work with
|
|
|
|
+- non-httplib underlying FPs. (Pull #990)
|
|
|
|
+-
|
|
|
|
+-* Empty filenames in multipart headers are now emitted as such, rather than
|
|
|
|
+- being suppressed. (Issue #1015)
|
|
|
|
+-
|
|
|
|
+-* Prefer user-supplied Host headers on chunked uploads. (Issue #1009)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.18.1 (2016-10-27)
|
|
|
|
+--------------------
|
|
|
|
+-
|
|
|
|
+-* CVE-2016-9015. Users who are using urllib3 version 1.17 or 1.18 along with
|
|
|
|
+- PyOpenSSL injection and OpenSSL 1.1.0 *must* upgrade to this version. This
|
|
|
|
+- release fixes a vulnerability whereby urllib3 in the above configuration
|
|
|
|
+- would silently fail to validate TLS certificates due to erroneously setting
|
|
|
|
+- invalid flags in OpenSSL's ``SSL_CTX_set_verify`` function. These erroneous
|
|
|
|
+- flags do not cause a problem in OpenSSL versions before 1.1.0, which
|
|
|
|
+- interprets the presence of any flag as requesting certificate validation.
|
|
|
|
+-
|
|
|
|
+- There is no PR for this patch, as it was prepared for simultaneous disclosure
|
|
|
|
+- and release. The master branch received the same fix in PR #1010.
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.18 (2016-09-26)
|
|
|
|
+------------------
|
|
|
|
+-
|
|
|
|
+-* Fixed incorrect message for IncompleteRead exception. (PR #973)
|
|
|
|
+-
|
|
|
|
+-* Accept ``iPAddress`` subject alternative name fields in TLS certificates.
|
|
|
|
+- (Issue #258)
|
|
|
|
+-
|
|
|
|
+-* Fixed consistency of ``HTTPResponse.closed`` between Python 2 and 3.
|
|
|
|
+- (Issue #977)
|
|
|
|
+-
|
|
|
|
+-* Fixed handling of wildcard certificates when using PyOpenSSL. (Issue #979)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.17 (2016-09-06)
|
|
|
|
+------------------
|
|
|
|
+-
|
|
|
|
+-* Accept ``SSLContext`` objects for use in SSL/TLS negotiation. (Issue #835)
|
|
|
|
+-
|
|
|
|
+-* ConnectionPool debug log now includes scheme, host, and port. (Issue #897)
|
|
|
|
+-
|
|
|
|
+-* Substantially refactored documentation. (Issue #887)
|
|
|
|
+-
|
|
|
|
+-* Used URLFetch default timeout on AppEngine, rather than hardcoding our own.
|
|
|
|
+- (Issue #858)
|
|
|
|
+-
|
|
|
|
+-* Normalize the scheme and host in the URL parser (Issue #833)
|
|
|
|
+-
|
|
|
|
+-* ``HTTPResponse`` contains the last ``Retry`` object, which now also
|
|
|
|
+- contains retries history. (Issue #848)
|
|
|
|
+-
|
|
|
|
+-* Timeout can no longer be set as boolean, and must be greater than zero.
|
|
|
|
+- (PR #924)
|
|
|
|
+-
|
|
|
|
+-* Removed pyasn1 and ndg-httpsclient from dependencies used for PyOpenSSL. We
|
|
|
|
+- now use cryptography and idna, both of which are already dependencies of
|
|
|
|
+- PyOpenSSL. (PR #930)
|
|
|
|
+-
|
|
|
|
+-* Fixed infinite loop in ``stream`` when amt=None. (Issue #928)
|
|
|
|
+-
|
|
|
|
+-* Try to use the operating system's certificates when we are using an
|
|
|
|
+- ``SSLContext``. (PR #941)
|
|
|
|
+-
|
|
|
|
+-* Updated cipher suite list to allow ChaCha20+Poly1305. AES-GCM is preferred to
|
|
|
|
+- ChaCha20, but ChaCha20 is then preferred to everything else. (PR #947)
|
|
|
|
+-
|
|
|
|
+-* Updated cipher suite list to remove 3DES-based cipher suites. (PR #958)
|
|
|
|
+-
|
|
|
|
+-* Removed the cipher suite fallback to allow HIGH ciphers. (PR #958)
|
|
|
|
+-
|
|
|
|
+-* Implemented ``length_remaining`` to determine remaining content
|
|
|
|
+- to be read. (PR #949)
|
|
|
|
+-
|
|
|
|
+-* Implemented ``enforce_content_length`` to enable exceptions when
|
|
|
|
+- incomplete data chunks are received. (PR #949)
|
|
|
|
+-
|
|
|
|
+-* Dropped connection start, dropped connection reset, redirect, forced retry,
|
|
|
|
+- and new HTTPS connection log levels to DEBUG, from INFO. (PR #967)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.16 (2016-06-11)
|
|
|
|
+------------------
|
|
|
|
+-
|
|
|
|
+-* Disable IPv6 DNS when IPv6 connections are not possible. (Issue #840)
|
|
|
|
+-
|
|
|
|
+-* Provide ``key_fn_by_scheme`` pool keying mechanism that can be
|
|
|
|
+- overridden. (Issue #830)
|
|
|
|
+-
|
|
|
|
+-* Normalize scheme and host to lowercase for pool keys, and include
|
|
|
|
+- ``source_address``. (Issue #830)
|
|
|
|
+-
|
|
|
|
+-* Cleaner exception chain in Python 3 for ``_make_request``.
|
|
|
|
+- (Issue #861)
|
|
|
|
+-
|
|
|
|
+-* Fixed installing ``urllib3[socks]`` extra. (Issue #864)
|
|
|
|
+-
|
|
|
|
+-* Fixed signature of ``ConnectionPool.close`` so it can actually safely be
|
|
|
|
+- called by subclasses. (Issue #873)
|
|
|
|
+-
|
|
|
|
+-* Retain ``release_conn`` state across retries. (Issues #651, #866)
|
|
|
|
+-
|
|
|
|
+-* Add customizable ``HTTPConnectionPool.ResponseCls``, which defaults to
|
|
|
|
+- ``HTTPResponse`` but can be replaced with a subclass. (Issue #879)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.15.1 (2016-04-11)
|
|
|
|
+--------------------
|
|
|
|
+-
|
|
|
|
+-* Fix packaging to include backports module. (Issue #841)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.15 (2016-04-06)
|
|
|
|
+------------------
|
|
|
|
+-
|
|
|
|
+-* Added Retry(raise_on_status=False). (Issue #720)
|
|
|
|
+-
|
|
|
|
+-* Always use setuptools, no more distutils fallback. (Issue #785)
|
|
|
|
+-
|
|
|
|
+-* Dropped support for Python 3.2. (Issue #786)
|
|
|
|
+-
|
|
|
|
+-* Chunked transfer encoding when requesting with ``chunked=True``.
|
|
|
|
+- (Issue #790)
|
|
|
|
+-
|
|
|
|
+-* Fixed regression with IPv6 port parsing. (Issue #801)
|
|
|
|
+-
|
|
|
|
+-* Append SNIMissingWarning messages to allow users to specify it in
|
|
|
|
+- the PYTHONWARNINGS environment variable. (Issue #816)
|
|
|
|
+-
|
|
|
|
+-* Handle unicode headers in Py2. (Issue #818)
|
|
|
|
+-
|
|
|
|
+-* Log certificate when there is a hostname mismatch. (Issue #820)
|
|
|
|
+-
|
|
|
|
+-* Preserve order of request/response headers. (Issue #821)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.14 (2015-12-29)
|
|
|
|
+------------------
|
|
|
|
+-
|
|
|
|
+-* contrib: SOCKS proxy support! (Issue #762)
|
|
|
|
+-
|
|
|
|
+-* Fixed AppEngine handling of transfer-encoding header and bug
|
|
|
|
+- in Timeout defaults checking. (Issue #763)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.13.1 (2015-12-18)
|
|
|
|
+--------------------
|
|
|
|
+-
|
|
|
|
+-* Fixed regression in IPv6 + SSL for match_hostname. (Issue #761)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.13 (2015-12-14)
|
|
|
|
+------------------
|
|
|
|
+-
|
|
|
|
+-* Fixed ``pip install urllib3[secure]`` on modern pip. (Issue #706)
|
|
|
|
+-
|
|
|
|
+-* pyopenssl: Fixed SSL3_WRITE_PENDING error. (Issue #717)
|
|
|
|
+-
|
|
|
|
+-* pyopenssl: Support for TLSv1.1 and TLSv1.2. (Issue #696)
|
|
|
|
+-
|
|
|
|
+-* Close connections more defensively on exception. (Issue #734)
|
|
|
|
+-
|
|
|
|
+-* Adjusted ``read_chunked`` to handle gzipped, chunk-encoded bodies without
|
|
|
|
+- repeatedly flushing the decoder, to function better on Jython. (Issue #743)
|
|
|
|
+-
|
|
|
|
+-* Accept ``ca_cert_dir`` for SSL-related PoolManager configuration. (Issue #758)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.12 (2015-09-03)
|
|
|
|
+------------------
|
|
|
|
+-
|
|
|
|
+-* Rely on ``six`` for importing ``httplib`` to work around
|
|
|
|
+- conflicts with other Python 3 shims. (Issue #688)
|
|
|
|
+-
|
|
|
|
+-* Add support for directories of certificate authorities, as supported by
|
|
|
|
+- OpenSSL. (Issue #701)
|
|
|
|
+-
|
|
|
|
+-* New exception: ``NewConnectionError``, raised when we fail to establish
|
|
|
|
+- a new connection, usually ``ECONNREFUSED`` socket error.
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.11 (2015-07-21)
|
|
|
|
+------------------
|
|
|
|
+-
|
|
|
|
+-* When ``ca_certs`` is given, ``cert_reqs`` defaults to
|
|
|
|
+- ``'CERT_REQUIRED'``. (Issue #650)
|
|
|
|
+-
|
|
|
|
+-* ``pip install urllib3[secure]`` will install Certifi and
|
|
|
|
+- PyOpenSSL as dependencies. (Issue #678)
|
|
|
|
+-
|
|
|
|
+-* Made ``HTTPHeaderDict`` usable as a ``headers`` input value
|
|
|
|
+- (Issues #632, #679)
|
|
|
|
+-
|
|
|
|
+-* Added `urllib3.contrib.appengine <https://urllib3.readthedocs.io/en/latest/contrib.html#google-app-engine>`_
|
|
|
|
+- which has an ``AppEngineManager`` for using ``URLFetch`` in a
|
|
|
|
+- Google AppEngine environment. (Issue #664)
|
|
|
|
+-
|
|
|
|
+-* Dev: Added test suite for AppEngine. (Issue #631)
|
|
|
|
+-
|
|
|
|
+-* Fix performance regression when using PyOpenSSL. (Issue #626)
|
|
|
|
+-
|
|
|
|
+-* Passing incorrect scheme (e.g. ``foo://``) will raise
|
|
|
|
+- ``ValueError`` instead of ``AssertionError`` (backwards
|
|
|
|
+- compatible for now, but please migrate). (Issue #640)
|
|
|
|
+-
|
|
|
|
+-* Fix pools not getting replenished when an error occurs during a
|
|
|
|
+- request using ``release_conn=False``. (Issue #644)
|
|
|
|
+-
|
|
|
|
+-* Fix pool-default headers not applying for url-encoded requests
|
|
|
|
+- like GET. (Issue #657)
|
|
|
|
+-
|
|
|
|
+-* log.warning in Python 3 when headers are skipped due to parsing
|
|
|
|
+- errors. (Issue #642)
|
|
|
|
+-
|
|
|
|
+-* Close and discard connections if an error occurs during read.
|
|
|
|
+- (Issue #660)
|
|
|
|
+-
|
|
|
|
+-* Fix host parsing for IPv6 proxies. (Issue #668)
|
|
|
|
+-
|
|
|
|
+-* Separate warning type SubjectAltNameWarning, now issued once
|
|
|
|
+- per host. (Issue #671)
|
|
|
|
+-
|
|
|
|
+-* Fix ``httplib.IncompleteRead`` not getting converted to
|
|
|
|
+- ``ProtocolError`` when using ``HTTPResponse.stream()``
|
|
|
|
+- (Issue #674)
|
|
|
|
+-
|
|
|
|
+-1.10.4 (2015-05-03)
|
|
|
|
+--------------------
|
|
|
|
+-
|
|
|
|
+-* Migrate tests to Tornado 4. (Issue #594)
|
|
|
|
+-
|
|
|
|
+-* Append default warning configuration rather than overwrite.
|
|
|
|
+- (Issue #603)
|
|
|
|
+-
|
|
|
|
+-* Fix streaming decoding regression. (Issue #595)
|
|
|
|
+-
|
|
|
|
+-* Fix chunked requests losing state across keep-alive connections.
|
|
|
|
+- (Issue #599)
|
|
|
|
+-
|
|
|
|
+-* Fix hanging when chunked HEAD response has no body. (Issue #605)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.10.3 (2015-04-21)
|
|
|
|
+--------------------
|
|
|
|
+-
|
|
|
|
+-* Emit ``InsecurePlatformWarning`` when SSLContext object is missing.
|
|
|
|
+- (Issue #558)
|
|
|
|
+-
|
|
|
|
+-* Fix regression of duplicate header keys being discarded.
|
|
|
|
+- (Issue #563)
|
|
|
|
+-
|
|
|
|
+-* ``Response.stream()`` returns a generator for chunked responses.
|
|
|
|
+- (Issue #560)
|
|
|
|
+-
|
|
|
|
+-* Set upper-bound timeout when waiting for a socket in PyOpenSSL.
|
|
|
|
+- (Issue #585)
|
|
|
|
+-
|
|
|
|
+-* Work on platforms without `ssl` module for plain HTTP requests.
|
|
|
|
+- (Issue #587)
|
|
|
|
+-
|
|
|
|
+-* Stop relying on the stdlib's default cipher list. (Issue #588)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.10.2 (2015-02-25)
|
|
|
|
+--------------------
|
|
|
|
+-
|
|
|
|
+-* Fix file descriptor leakage on retries. (Issue #548)
|
|
|
|
+-
|
|
|
|
+-* Removed RC4 from default cipher list. (Issue #551)
|
|
|
|
+-
|
|
|
|
+-* Header performance improvements. (Issue #544)
|
|
|
|
+-
|
|
|
|
+-* Fix PoolManager not obeying redirect retry settings. (Issue #553)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.10.1 (2015-02-10)
|
|
|
|
+--------------------
|
|
|
|
+-
|
|
|
|
+-* Pools can be used as context managers. (Issue #545)
|
|
|
|
+-
|
|
|
|
+-* Don't re-use connections which experienced an SSLError. (Issue #529)
|
|
|
|
+-
|
|
|
|
+-* Don't fail when gzip decoding an empty stream. (Issue #535)
|
|
|
|
+-
|
|
|
|
+-* Add sha256 support for fingerprint verification. (Issue #540)
|
|
|
|
+-
|
|
|
|
+-* Fixed handling of header values containing commas. (Issue #533)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.10 (2014-12-14)
|
|
|
|
+------------------
|
|
|
|
+-
|
|
|
|
+-* Disabled SSLv3. (Issue #473)
|
|
|
|
+-
|
|
|
|
+-* Add ``Url.url`` property to return the composed url string. (Issue #394)
|
|
|
|
+-
|
|
|
|
+-* Fixed PyOpenSSL + gevent ``WantWriteError``. (Issue #412)
|
|
|
|
+-
|
|
|
|
+-* ``MaxRetryError.reason`` will always be an exception, not string.
|
|
|
|
+- (Issue #481)
|
|
|
|
+-
|
|
|
|
+-* Fixed SSL-related timeouts not being detected as timeouts. (Issue #492)
|
|
|
|
+-
|
|
|
|
+-* Py3: Use ``ssl.create_default_context()`` when available. (Issue #473)
|
|
|
|
+-
|
|
|
|
+-* Emit ``InsecureRequestWarning`` for *every* insecure HTTPS request.
|
|
|
|
+- (Issue #496)
|
|
|
|
+-
|
|
|
|
+-* Emit ``SecurityWarning`` when certificate has no ``subjectAltName``.
|
|
|
|
+- (Issue #499)
|
|
|
|
+-
|
|
|
|
+-* Close and discard sockets which experienced SSL-related errors.
|
|
|
|
+- (Issue #501)
|
|
|
|
+-
|
|
|
|
+-* Handle ``body`` param in ``.request(...)``. (Issue #513)
|
|
|
|
+-
|
|
|
|
+-* Respect timeout with HTTPS proxy. (Issue #505)
|
|
|
|
+-
|
|
|
|
+-* PyOpenSSL: Handle ZeroReturnError exception. (Issue #520)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.9.1 (2014-09-13)
|
|
|
|
+-------------------
|
|
|
|
+-
|
|
|
|
+-* Apply socket arguments before binding. (Issue #427)
|
|
|
|
+-
|
|
|
|
+-* More careful checks if fp-like object is closed. (Issue #435)
|
|
|
|
+-
|
|
|
|
+-* Fixed packaging issues of some development-related files not
|
|
|
|
+- getting included. (Issue #440)
|
|
|
|
+-
|
|
|
|
+-* Allow performing *only* fingerprint verification. (Issue #444)
|
|
|
|
+-
|
|
|
|
+-* Emit ``SecurityWarning`` if system clock is waaay off. (Issue #445)
|
|
|
|
+-
|
|
|
|
+-* Fixed PyOpenSSL compatibility with PyPy. (Issue #450)
|
|
|
|
+-
|
|
|
|
+-* Fixed ``BrokenPipeError`` and ``ConnectionError`` handling in Py3.
|
|
|
|
+- (Issue #443)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.9 (2014-07-04)
|
|
|
|
+-----------------
|
|
|
|
+-
|
|
|
|
+-* Shuffled around development-related files. If you're maintaining a distro
|
|
|
|
+- package of urllib3, you may need to tweak things. (Issue #415)
|
|
|
|
+-
|
|
|
|
+-* Unverified HTTPS requests will trigger a warning on the first request. See
|
|
|
|
+- our new `security documentation
|
|
|
|
+- <https://urllib3.readthedocs.io/en/latest/security.html>`_ for details.
|
|
|
|
+- (Issue #426)
|
|
|
|
+-
|
|
|
|
+-* New retry logic and ``urllib3.util.retry.Retry`` configuration object.
|
|
|
|
+- (Issue #326)
|
|
|
|
+-
|
|
|
|
+-* All raised exceptions should now wrapped in a
|
|
|
|
+- ``urllib3.exceptions.HTTPException``-extending exception. (Issue #326)
|
|
|
|
+-
|
|
|
|
+-* All errors during a retry-enabled request should be wrapped in
|
|
|
|
+- ``urllib3.exceptions.MaxRetryError``, including timeout-related exceptions
|
|
|
|
+- which were previously exempt. Underlying error is accessible from the
|
|
|
|
+- ``.reason`` property. (Issue #326)
|
|
|
|
+-
|
|
|
|
+-* ``urllib3.exceptions.ConnectionError`` renamed to
|
|
|
|
+- ``urllib3.exceptions.ProtocolError``. (Issue #326)
|
|
|
|
+-
|
|
|
|
+-* Errors during response read (such as IncompleteRead) are now wrapped in
|
|
|
|
+- ``urllib3.exceptions.ProtocolError``. (Issue #418)
|
|
|
|
+-
|
|
|
|
+-* Requesting an empty host will raise ``urllib3.exceptions.LocationValueError``.
|
|
|
|
+- (Issue #417)
|
|
|
|
+-
|
|
|
|
+-* Catch read timeouts over SSL connections as
|
|
|
|
+- ``urllib3.exceptions.ReadTimeoutError``. (Issue #419)
|
|
|
|
+-
|
|
|
|
+-* Apply socket arguments before connecting. (Issue #427)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.8.3 (2014-06-23)
|
|
|
|
+-------------------
|
|
|
|
+-
|
|
|
|
+-* Fix TLS verification when using a proxy in Python 3.4.1. (Issue #385)
|
|
|
|
+-
|
|
|
|
+-* Add ``disable_cache`` option to ``urllib3.util.make_headers``. (Issue #393)
|
|
|
|
+-
|
|
|
|
+-* Wrap ``socket.timeout`` exception with
|
|
|
|
+- ``urllib3.exceptions.ReadTimeoutError``. (Issue #399)
|
|
|
|
+-
|
|
|
|
+-* Fixed proxy-related bug where connections were being reused incorrectly.
|
|
|
|
+- (Issues #366, #369)
|
|
|
|
+-
|
|
|
|
+-* Added ``socket_options`` keyword parameter which allows to define
|
|
|
|
+- ``setsockopt`` configuration of new sockets. (Issue #397)
|
|
|
|
+-
|
|
|
|
+-* Removed ``HTTPConnection.tcp_nodelay`` in favor of
|
|
|
|
+- ``HTTPConnection.default_socket_options``. (Issue #397)
|
|
|
|
+-
|
|
|
|
+-* Fixed ``TypeError`` bug in Python 2.6.4. (Issue #411)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.8.2 (2014-04-17)
|
|
|
|
+-------------------
|
|
|
|
+-
|
|
|
|
+-* Fix ``urllib3.util`` not being included in the package.
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.8.1 (2014-04-17)
|
|
|
|
+-------------------
|
|
|
|
+-
|
|
|
|
+-* Fix AppEngine bug of HTTPS requests going out as HTTP. (Issue #356)
|
|
|
|
+-
|
|
|
|
+-* Don't install ``dummyserver`` into ``site-packages`` as it's only needed
|
|
|
|
+- for the test suite. (Issue #362)
|
|
|
|
+-
|
|
|
|
+-* Added support for specifying ``source_address``. (Issue #352)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.8 (2014-03-04)
|
|
|
|
+-----------------
|
|
|
|
+-
|
|
|
|
+-* Improved url parsing in ``urllib3.util.parse_url`` (properly parse '@' in
|
|
|
|
+- username, and blank ports like 'hostname:').
|
|
|
|
+-
|
|
|
|
+-* New ``urllib3.connection`` module which contains all the HTTPConnection
|
|
|
|
+- objects.
|
|
|
|
+-
|
|
|
|
+-* Several ``urllib3.util.Timeout``-related fixes. Also changed constructor
|
|
|
|
+- signature to a more sensible order. [Backwards incompatible]
|
|
|
|
+- (Issues #252, #262, #263)
|
|
|
|
+-
|
|
|
|
+-* Use ``backports.ssl_match_hostname`` if it's installed. (Issue #274)
|
|
|
|
+-
|
|
|
|
+-* Added ``.tell()`` method to ``urllib3.response.HTTPResponse`` which
|
|
|
|
+- returns the number of bytes read so far. (Issue #277)
|
|
|
|
+-
|
|
|
|
+-* Support for platforms without threading. (Issue #289)
|
|
|
|
+-
|
|
|
|
+-* Expand default-port comparison in ``HTTPConnectionPool.is_same_host``
|
|
|
|
+- to allow a pool with no specified port to be considered equal to to an
|
|
|
|
+- HTTP/HTTPS url with port 80/443 explicitly provided. (Issue #305)
|
|
|
|
+-
|
|
|
|
+-* Improved default SSL/TLS settings to avoid vulnerabilities.
|
|
|
|
+- (Issue #309)
|
|
|
|
+-
|
|
|
|
+-* Fixed ``urllib3.poolmanager.ProxyManager`` not retrying on connect errors.
|
|
|
|
+- (Issue #310)
|
|
|
|
+-
|
|
|
|
+-* Disable Nagle's Algorithm on the socket for non-proxies. A subset of requests
|
|
|
|
+- will send the entire HTTP request ~200 milliseconds faster; however, some of
|
|
|
|
+- the resulting TCP packets will be smaller. (Issue #254)
|
|
|
|
+-
|
|
|
|
+-* Increased maximum number of SubjectAltNames in ``urllib3.contrib.pyopenssl``
|
|
|
|
+- from the default 64 to 1024 in a single certificate. (Issue #318)
|
|
|
|
+-
|
|
|
|
+-* Headers are now passed and stored as a custom
|
|
|
|
+- ``urllib3.collections_.HTTPHeaderDict`` object rather than a plain ``dict``.
|
|
|
|
+- (Issue #329, #333)
|
|
|
|
+-
|
|
|
|
+-* Headers no longer lose their case on Python 3. (Issue #236)
|
|
|
|
+-
|
|
|
|
+-* ``urllib3.contrib.pyopenssl`` now uses the operating system's default CA
|
|
|
|
+- certificates on inject. (Issue #332)
|
|
|
|
+-
|
|
|
|
+-* Requests with ``retries=False`` will immediately raise any exceptions without
|
|
|
|
+- wrapping them in ``MaxRetryError``. (Issue #348)
|
|
|
|
+-
|
|
|
|
+-* Fixed open socket leak with SSL-related failures. (Issue #344, #348)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.7.1 (2013-09-25)
|
|
|
|
+-------------------
|
|
|
|
+-
|
|
|
|
+-* Added granular timeout support with new ``urllib3.util.Timeout`` class.
|
|
|
|
+- (Issue #231)
|
|
|
|
+-
|
|
|
|
+-* Fixed Python 3.4 support. (Issue #238)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.7 (2013-08-14)
|
|
|
|
+-----------------
|
|
|
|
+-
|
|
|
|
+-* More exceptions are now pickle-able, with tests. (Issue #174)
|
|
|
|
+-
|
|
|
|
+-* Fixed redirecting with relative URLs in Location header. (Issue #178)
|
|
|
|
+-
|
|
|
|
+-* Support for relative urls in ``Location: ...`` header. (Issue #179)
|
|
|
|
+-
|
|
|
|
+-* ``urllib3.response.HTTPResponse`` now inherits from ``io.IOBase`` for bonus
|
|
|
|
+- file-like functionality. (Issue #187)
|
|
|
|
+-
|
|
|
|
+-* Passing ``assert_hostname=False`` when creating a HTTPSConnectionPool will
|
|
|
|
+- skip hostname verification for SSL connections. (Issue #194)
|
|
|
|
+-
|
|
|
|
+-* New method ``urllib3.response.HTTPResponse.stream(...)`` which acts as a
|
|
|
|
+- generator wrapped around ``.read(...)``. (Issue #198)
|
|
|
|
+-
|
|
|
|
+-* IPv6 url parsing enforces brackets around the hostname. (Issue #199)
|
|
|
|
+-
|
|
|
|
+-* Fixed thread race condition in
|
|
|
|
+- ``urllib3.poolmanager.PoolManager.connection_from_host(...)`` (Issue #204)
|
|
|
|
+-
|
|
|
|
+-* ``ProxyManager`` requests now include non-default port in ``Host: ...``
|
|
|
|
+- header. (Issue #217)
|
|
|
|
+-
|
|
|
|
+-* Added HTTPS proxy support in ``ProxyManager``. (Issue #170 #139)
|
|
|
|
+-
|
|
|
|
+-* New ``RequestField`` object can be passed to the ``fields=...`` param which
|
|
|
|
+- can specify headers. (Issue #220)
|
|
|
|
+-
|
|
|
|
+-* Raise ``urllib3.exceptions.ProxyError`` when connecting to proxy fails.
|
|
|
|
+- (Issue #221)
|
|
|
|
+-
|
|
|
|
+-* Use international headers when posting file names. (Issue #119)
|
|
|
|
+-
|
|
|
|
+-* Improved IPv6 support. (Issue #203)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.6 (2013-04-25)
|
|
|
|
+-----------------
|
|
|
|
+-
|
|
|
|
+-* Contrib: Optional SNI support for Py2 using PyOpenSSL. (Issue #156)
|
|
|
|
+-
|
|
|
|
+-* ``ProxyManager`` automatically adds ``Host: ...`` header if not given.
|
|
|
|
+-
|
|
|
|
+-* Improved SSL-related code. ``cert_req`` now optionally takes a string like
|
|
|
|
+- "REQUIRED" or "NONE". Same with ``ssl_version`` takes strings like "SSLv23"
|
|
|
|
+- The string values reflect the suffix of the respective constant variable.
|
|
|
|
+- (Issue #130)
|
|
|
|
+-
|
|
|
|
+-* Vendored ``socksipy`` now based on Anorov's fork which handles unexpectedly
|
|
|
|
+- closed proxy connections and larger read buffers. (Issue #135)
|
|
|
|
+-
|
|
|
|
+-* Ensure the connection is closed if no data is received, fixes connection leak
|
|
|
|
+- on some platforms. (Issue #133)
|
|
|
|
+-
|
|
|
|
+-* Added SNI support for SSL/TLS connections on Py32+. (Issue #89)
|
|
|
|
+-
|
|
|
|
+-* Tests fixed to be compatible with Py26 again. (Issue #125)
|
|
|
|
+-
|
|
|
|
+-* Added ability to choose SSL version by passing an ``ssl.PROTOCOL_*`` constant
|
|
|
|
+- to the ``ssl_version`` parameter of ``HTTPSConnectionPool``. (Issue #109)
|
|
|
|
+-
|
|
|
|
+-* Allow an explicit content type to be specified when encoding file fields.
|
|
|
|
+- (Issue #126)
|
|
|
|
+-
|
|
|
|
+-* Exceptions are now pickleable, with tests. (Issue #101)
|
|
|
|
+-
|
|
|
|
+-* Fixed default headers not getting passed in some cases. (Issue #99)
|
|
|
|
+-
|
|
|
|
+-* Treat "content-encoding" header value as case-insensitive, per RFC 2616
|
|
|
|
+- Section 3.5. (Issue #110)
|
|
|
|
+-
|
|
|
|
+-* "Connection Refused" SocketErrors will get retried rather than raised.
|
|
|
|
+- (Issue #92)
|
|
|
|
+-
|
|
|
|
+-* Updated vendored ``six``, no longer overrides the global ``six`` module
|
|
|
|
+- namespace. (Issue #113)
|
|
|
|
+-
|
|
|
|
+-* ``urllib3.exceptions.MaxRetryError`` contains a ``reason`` property holding
|
|
|
|
+- the exception that prompted the final retry. If ``reason is None`` then it
|
|
|
|
+- was due to a redirect. (Issue #92, #114)
|
|
|
|
+-
|
|
|
|
+-* Fixed ``PoolManager.urlopen()`` from not redirecting more than once.
|
|
|
|
+- (Issue #149)
|
|
|
|
+-
|
|
|
|
+-* Don't assume ``Content-Type: text/plain`` for multi-part encoding parameters
|
|
|
|
+- that are not files. (Issue #111)
|
|
|
|
+-
|
|
|
|
+-* Pass `strict` param down to ``httplib.HTTPConnection``. (Issue #122)
|
|
|
|
+-
|
|
|
|
+-* Added mechanism to verify SSL certificates by fingerprint (md5, sha1) or
|
|
|
|
+- against an arbitrary hostname (when connecting by IP or for misconfigured
|
|
|
|
+- servers). (Issue #140)
|
|
|
|
+-
|
|
|
|
+-* Streaming decompression support. (Issue #159)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.5 (2012-08-02)
|
|
|
|
+-----------------
|
|
|
|
+-
|
|
|
|
+-* Added ``urllib3.add_stderr_logger()`` for quickly enabling STDERR debug
|
|
|
|
+- logging in urllib3.
|
|
|
|
+-
|
|
|
|
+-* Native full URL parsing (including auth, path, query, fragment) available in
|
|
|
|
+- ``urllib3.util.parse_url(url)``.
|
|
|
|
+-
|
|
|
|
+-* Built-in redirect will switch method to 'GET' if status code is 303.
|
|
|
|
+- (Issue #11)
|
|
|
|
+-
|
|
|
|
+-* ``urllib3.PoolManager`` strips the scheme and host before sending the request
|
|
|
|
+- uri. (Issue #8)
|
|
|
|
+-
|
|
|
|
+-* New ``urllib3.exceptions.DecodeError`` exception for when automatic decoding,
|
|
|
|
+- based on the Content-Type header, fails.
|
|
|
|
+-
|
|
|
|
+-* Fixed bug with pool depletion and leaking connections (Issue #76). Added
|
|
|
|
+- explicit connection closing on pool eviction. Added
|
|
|
|
+- ``urllib3.PoolManager.clear()``.
|
|
|
|
+-
|
|
|
|
+-* 99% -> 100% unit test coverage.
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.4 (2012-06-16)
|
|
|
|
+-----------------
|
|
|
|
+-
|
|
|
|
+-* Minor AppEngine-related fixes.
|
|
|
|
+-
|
|
|
|
+-* Switched from ``mimetools.choose_boundary`` to ``uuid.uuid4()``.
|
|
|
|
+-
|
|
|
|
+-* Improved url parsing. (Issue #73)
|
|
|
|
+-
|
|
|
|
+-* IPv6 url support. (Issue #72)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.3 (2012-03-25)
|
|
|
|
+-----------------
|
|
|
|
+-
|
|
|
|
+-* Removed pre-1.0 deprecated API.
|
|
|
|
+-
|
|
|
|
+-* Refactored helpers into a ``urllib3.util`` submodule.
|
|
|
|
+-
|
|
|
|
+-* Fixed multipart encoding to support list-of-tuples for keys with multiple
|
|
|
|
+- values. (Issue #48)
|
|
|
|
+-
|
|
|
|
+-* Fixed multiple Set-Cookie headers in response not getting merged properly in
|
|
|
|
+- Python 3. (Issue #53)
|
|
|
|
+-
|
|
|
|
+-* AppEngine support with Py27. (Issue #61)
|
|
|
|
+-
|
|
|
|
+-* Minor ``encode_multipart_formdata`` fixes related to Python 3 strings vs
|
|
|
|
+- bytes.
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.2.2 (2012-02-06)
|
|
|
|
+-------------------
|
|
|
|
+-
|
|
|
|
+-* Fixed packaging bug of not shipping ``test-requirements.txt``. (Issue #47)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.2.1 (2012-02-05)
|
|
|
|
+-------------------
|
|
|
|
+-
|
|
|
|
+-* Fixed another bug related to when ``ssl`` module is not available. (Issue #41)
|
|
|
|
+-
|
|
|
|
+-* Location parsing errors now raise ``urllib3.exceptions.LocationParseError``
|
|
|
|
+- which inherits from ``ValueError``.
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.2 (2012-01-29)
|
|
|
|
+-----------------
|
|
|
|
+-
|
|
|
|
+-* Added Python 3 support (tested on 3.2.2)
|
|
|
|
+-
|
|
|
|
+-* Dropped Python 2.5 support (tested on 2.6.7, 2.7.2)
|
|
|
|
+-
|
|
|
|
+-* Use ``select.poll`` instead of ``select.select`` for platforms that support
|
|
|
|
+- it.
|
|
|
|
+-
|
|
|
|
+-* Use ``Queue.LifoQueue`` instead of ``Queue.Queue`` for more aggressive
|
|
|
|
+- connection reusing. Configurable by overriding ``ConnectionPool.QueueCls``.
|
|
|
|
+-
|
|
|
|
+-* Fixed ``ImportError`` during install when ``ssl`` module is not available.
|
|
|
|
+- (Issue #41)
|
|
|
|
+-
|
|
|
|
+-* Fixed ``PoolManager`` redirects between schemes (such as HTTP -> HTTPS) not
|
|
|
|
+- completing properly. (Issue #28, uncovered by Issue #10 in v1.1)
|
|
|
|
+-
|
|
|
|
+-* Ported ``dummyserver`` to use ``tornado`` instead of ``webob`` +
|
|
|
|
+- ``eventlet``. Removed extraneous unsupported dummyserver testing backends.
|
|
|
|
+- Added socket-level tests.
|
|
|
|
+-
|
|
|
|
+-* More tests. Achievement Unlocked: 99% Coverage.
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.1 (2012-01-07)
|
|
|
|
+-----------------
|
|
|
|
+-
|
|
|
|
+-* Refactored ``dummyserver`` to its own root namespace module (used for
|
|
|
|
+- testing).
|
|
|
|
+-
|
|
|
|
+-* Added hostname verification for ``VerifiedHTTPSConnection`` by vendoring in
|
|
|
|
+- Py32's ``ssl_match_hostname``. (Issue #25)
|
|
|
|
+-
|
|
|
|
+-* Fixed cross-host HTTP redirects when using ``PoolManager``. (Issue #10)
|
|
|
|
+-
|
|
|
|
+-* Fixed ``decode_content`` being ignored when set through ``urlopen``. (Issue
|
|
|
|
+- #27)
|
|
|
|
+-
|
|
|
|
+-* Fixed timeout-related bugs. (Issues #17, #23)
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.0.2 (2011-11-04)
|
|
|
|
+-------------------
|
|
|
|
+-
|
|
|
|
+-* Fixed typo in ``VerifiedHTTPSConnection`` which would only present as a bug if
|
|
|
|
+- you're using the object manually. (Thanks pyos)
|
|
|
|
+-
|
|
|
|
+-* Made RecentlyUsedContainer (and consequently PoolManager) more thread-safe by
|
|
|
|
+- wrapping the access log in a mutex. (Thanks @christer)
|
|
|
|
+-
|
|
|
|
+-* Made RecentlyUsedContainer more dict-like (corrected ``__delitem__`` and
|
|
|
|
+- ``__getitem__`` behaviour), with tests. Shouldn't affect core urllib3 code.
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.0.1 (2011-10-10)
|
|
|
|
+-------------------
|
|
|
|
+-
|
|
|
|
+-* Fixed a bug where the same connection would get returned into the pool twice,
|
|
|
|
+- causing extraneous "HttpConnectionPool is full" log warnings.
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-1.0 (2011-10-08)
|
|
|
|
+-----------------
|
|
|
|
+-
|
|
|
|
+-* Added ``PoolManager`` with LRU expiration of connections (tested and
|
|
|
|
+- documented).
|
|
|
|
+-* Added ``ProxyManager`` (needs tests, docs, and confirmation that it works
|
|
|
|
+- with HTTPS proxies).
|
|
|
|
+-* Added optional partial-read support for responses when
|
|
|
|
+- ``preload_content=False``. You can now make requests and just read the headers
|
|
|
|
+- without loading the content.
|
|
|
|
+-* Made response decoding optional (default on, same as before).
|
|
|
|
+-* Added optional explicit boundary string for ``encode_multipart_formdata``.
|
|
|
|
+-* Convenience request methods are now inherited from ``RequestMethods``. Old
|
|
|
|
+- helpers like ``get_url`` and ``post_url`` should be abandoned in favour of
|
|
|
|
+- the new ``request(method, url, ...)``.
|
|
|
|
+-* Refactored code to be even more decoupled, reusable, and extendable.
|
|
|
|
+-* License header added to ``.py`` files.
|
|
|
|
+-* Embiggened the documentation: Lots of Sphinx-friendly docstrings in the code
|
|
|
|
+- and docs in ``docs/`` and on https://urllib3.readthedocs.io/.
|
|
|
|
+-* Embettered all the things!
|
|
|
|
+-* Started writing this file.
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-0.4.1 (2011-07-17)
|
|
|
|
+-------------------
|
|
|
|
+-
|
|
|
|
+-* Minor bug fixes, code cleanup.
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-0.4 (2011-03-01)
|
|
|
|
+-----------------
|
|
|
|
+-
|
|
|
|
+-* Better unicode support.
|
|
|
|
+-* Added ``VerifiedHTTPSConnection``.
|
|
|
|
+-* Added ``NTLMConnectionPool`` in contrib.
|
|
|
|
+-* Minor improvements.
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-0.3.1 (2010-07-13)
|
|
|
|
+-------------------
|
|
|
|
+-
|
|
|
|
+-* Added ``assert_host_name`` optional parameter. Now compatible with proxies.
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-0.3 (2009-12-10)
|
|
|
|
+-----------------
|
|
|
|
+-
|
|
|
|
+-* Added HTTPS support.
|
|
|
|
+-* Minor bug fixes.
|
|
|
|
+-* Refactored, broken backwards compatibility with 0.2.
|
|
|
|
+-* API to be treated as stable from this version forward.
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-0.2 (2008-11-17)
|
|
|
|
+-----------------
|
|
|
|
+-
|
|
|
|
+-* Added unit tests.
|
|
|
|
+-* Bug fixes.
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-0.1 (2008-11-16)
|
|
|
|
+-----------------
|
|
|
|
+-
|
|
|
|
+-* First release.
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3-1.25.9.dist-info/RECORD b/third_party/python/urllib3/urllib3-1.25.9.dist-info/RECORD
|
|
|
|
+deleted file mode 100644
|
|
|
|
+--- a/third_party/python/urllib3/urllib3-1.25.9.dist-info/RECORD
|
|
|
|
++++ /dev/null
|
|
|
|
+@@ -1,41 +0,0 @@
|
|
|
|
+-urllib3/__init__.py,sha256=rdFZCO1L7e8861ZTvo8AiSKwxCe9SnWQUQwJ599YV9c,2683
|
|
|
|
+-urllib3/_collections.py,sha256=GouVsNzwg6jADZTmimMI6oqmwKSswnMo9dh5tGNVWO4,10792
|
|
|
|
+-urllib3/connection.py,sha256=Fln8a_bkegdNMkFoSOwyI0PJvL1OqzVUO6ifihKOTpc,14461
|
|
|
|
+-urllib3/connectionpool.py,sha256=egdaX-Db_LVXifDxv3JY0dHIpQqDv0wC0_9Eeh8FkPM,35725
|
|
|
|
+-urllib3/exceptions.py,sha256=D2Jvab7M7m_n0rnmBmq481paoVT32VvVeB6VeQM0y-w,7172
|
|
|
|
+-urllib3/fields.py,sha256=kroD76QK-GdHHW7f_AUN4XxDC3OQPI2FFrS9eSL4BCs,8553
|
|
|
|
+-urllib3/filepost.py,sha256=vj0qbrpT1AFzvvW4SuC8M5kJiw7wftHcSr-7b8UpPpw,2440
|
|
|
|
+-urllib3/poolmanager.py,sha256=iWEAIGrVNGoOmQyfiFwCqG-IyYy6GIQ-jJ9QCsX9li4,17861
|
|
|
|
+-urllib3/request.py,sha256=hhoHvEEatyd9Tn5EbGjQ0emn-ENMCyY591yNWTneINA,6018
|
|
|
|
+-urllib3/response.py,sha256=eo1Sfkn2x44FtjgP3qwwDsG9ak84spQAxEGy7Ovd4Pc,28221
|
|
|
|
+-urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
|
|
+-urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957
|
|
|
|
+-urllib3/contrib/appengine.py,sha256=9RyUW5vKy4VPa2imtwBNWYKILrypr-K6UXEHUYsf0JY,11010
|
|
|
|
+-urllib3/contrib/ntlmpool.py,sha256=a402AwGN_Ll3N-4ur_AS6UrU-ycUtlnYqoBF76lORg8,4160
|
|
|
|
+-urllib3/contrib/pyopenssl.py,sha256=qQKqQXvlSvpCa2yEPxpdv18lS71SMESr9XzH9K9x3KI,16565
|
|
|
|
+-urllib3/contrib/securetransport.py,sha256=vBDFjSnH2gWa-ztMKVaiwW46K1mlDZKqvo_VAonfdcY,32401
|
|
|
|
+-urllib3/contrib/socks.py,sha256=nzDMgDIFJWVubKHqvIn2-SKCO91hhJInP92WgHChGzA,7036
|
|
|
|
+-urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
|
|
+-urllib3/contrib/_securetransport/bindings.py,sha256=mullWYFaghBdRWla6HYU-TBgFRTPLBEfxj3jplbeJmQ,16886
|
|
|
|
+-urllib3/contrib/_securetransport/low_level.py,sha256=V7GnujxnWZh2N2sMsV5N4d9Imymokkm3zBwgt77_bSE,11956
|
|
|
|
+-urllib3/packages/__init__.py,sha256=h4BLhD4tLaBx1adaDtKXfupsgqY0wWLXb_f1_yVlV6A,108
|
|
|
|
+-urllib3/packages/six.py,sha256=adx4z-eM_D0Vvu0IIqVzFACQ_ux9l64y7DkSEfbxCDs,32536
|
|
|
|
+-urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
|
|
+-urllib3/packages/backports/makefile.py,sha256=005wrvH-_pWSnTFqQ2sdzzh4zVCtQUUQ4mR2Yyxwc0A,1418
|
|
|
|
+-urllib3/packages/ssl_match_hostname/__init__.py,sha256=ywgKMtfHi1-DrXlzPfVAhzsLzzqcK7GT6eLgdode1Fg,688
|
|
|
|
+-urllib3/packages/ssl_match_hostname/_implementation.py,sha256=6dZ-q074g7XhsJ27MFCgkct8iVNZB3sMZvKhf-KUVy0,5679
|
|
|
|
+-urllib3/util/__init__.py,sha256=bWNaav_OT-1L7-sxm59cGb59rDORlbhb_4noduM5m0U,1038
|
|
|
|
+-urllib3/util/connection.py,sha256=NsxUAKQ98GKywta--zg57CdVpeTCI6N-GElCq78Dl8U,4637
|
|
|
|
+-urllib3/util/queue.py,sha256=myTX3JDHntglKQNBf3b6dasHH-uF-W59vzGSQiFdAfI,497
|
|
|
|
+-urllib3/util/request.py,sha256=C-6-AWffxZG03AdRGoY59uqsn4CVItKU6gjxz7Hc3Mc,3815
|
|
|
|
+-urllib3/util/response.py,sha256=_WbTQr8xRQuJuY2rTIZxVdJD6mnEOtQupjaK_bF_Vj8,2573
|
|
|
|
+-urllib3/util/retry.py,sha256=3wbv7SdzYNOxPcBiFkPCubTbK1_6vWSepznOXirhUfA,15543
|
|
|
|
+-urllib3/util/ssl_.py,sha256=R64MEN6Bh-YJq8b14kCb6hbV8L1p8oq4rcZiBow3tTQ,14511
|
|
|
|
+-urllib3/util/timeout.py,sha256=3qawUo-TZq4q7tyeRToMIOdNGEOBjOOQVq7nHnLryP4,9947
|
|
|
|
+-urllib3/util/url.py,sha256=jvkBGN64wo_Mx6Q6JYpFCGxamxbI2NdFoNQVTr7PUOM,13964
|
|
|
|
+-urllib3/util/wait.py,sha256=k46KzqIYu3Vnzla5YW3EvtInNlU_QycFqQAghIOxoAg,5406
|
|
|
|
+-urllib3-1.25.9.dist-info/LICENSE.txt,sha256=fA0TbuBYU4mt8tJWcbuZaHofdZKfRlt_Fu4_Ado3JV4,1115
|
|
|
|
+-urllib3-1.25.9.dist-info/METADATA,sha256=QVc-HCXpe7Dm_RDmd-GpzKT-LvxBgwsPsLEiE5kUjEI,39852
|
|
|
|
+-urllib3-1.25.9.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
|
|
|
|
+-urllib3-1.25.9.dist-info/top_level.txt,sha256=EMiXL2sKrTcmrMxIHTqdc3ET54pQI2Y072LexFEemvo,8
|
|
|
|
+-urllib3-1.25.9.dist-info/RECORD,,
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3-1.25.9.dist-info/WHEEL b/third_party/python/urllib3/urllib3-1.25.9.dist-info/WHEEL
|
|
|
|
+deleted file mode 100644
|
|
|
|
+--- a/third_party/python/urllib3/urllib3-1.25.9.dist-info/WHEEL
|
|
|
|
++++ /dev/null
|
|
|
|
+@@ -1,6 +0,0 @@
|
|
|
|
+-Wheel-Version: 1.0
|
|
|
|
+-Generator: bdist_wheel (0.34.2)
|
|
|
|
+-Root-Is-Purelib: true
|
|
|
|
+-Tag: py2-none-any
|
|
|
|
+-Tag: py3-none-any
|
|
|
|
+-
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3-1.26.0.dist-info/LICENSE.txt b/third_party/python/urllib3/urllib3-1.26.0.dist-info/LICENSE.txt
|
|
|
|
+new file mode 100644
|
|
|
|
+--- /dev/null
|
|
|
|
++++ b/third_party/python/urllib3/urllib3-1.26.0.dist-info/LICENSE.txt
|
|
|
|
+@@ -0,0 +1,21 @@
|
|
|
|
++MIT License
|
|
|
|
++
|
|
|
|
++Copyright (c) 2008-2020 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
|
|
|
|
++
|
|
|
|
++Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
|
|
++of this software and associated documentation files (the "Software"), to deal
|
|
|
|
++in the Software without restriction, including without limitation the rights
|
|
|
|
++to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
|
|
++copies of the Software, and to permit persons to whom the Software is
|
|
|
|
++furnished to do so, subject to the following conditions:
|
|
|
|
++
|
|
|
|
++The above copyright notice and this permission notice shall be included in all
|
|
|
|
++copies or substantial portions of the Software.
|
|
|
|
++
|
|
|
|
++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
|
|
++IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
|
|
++FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
|
|
++AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
|
|
++LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
|
|
++OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
|
|
++SOFTWARE.
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3-1.26.0.dist-info/METADATA b/third_party/python/urllib3/urllib3-1.26.0.dist-info/METADATA
|
|
|
|
+new file mode 100644
|
|
|
|
+--- /dev/null
|
|
|
|
++++ b/third_party/python/urllib3/urllib3-1.26.0.dist-info/METADATA
|
|
|
|
+@@ -0,0 +1,1335 @@
|
|
|
|
++Metadata-Version: 2.1
|
|
|
|
++Name: urllib3
|
|
|
|
++Version: 1.26.0
|
|
|
|
++Summary: HTTP library with thread-safe connection pooling, file post, and more.
|
|
|
|
++Home-page: https://urllib3.readthedocs.io/
|
|
|
|
++Author: Andrey Petrov
|
|
|
|
++Author-email: andrey.petrov@shazow.net
|
|
|
|
++License: MIT
|
|
|
|
++Project-URL: Documentation, https://urllib3.readthedocs.io/
|
|
|
|
++Project-URL: Code, https://github.com/urllib3/urllib3
|
|
|
|
++Project-URL: Issue tracker, https://github.com/urllib3/urllib3/issues
|
|
|
|
++Keywords: urllib httplib threadsafe filepost http https ssl pooling
|
|
|
|
++Platform: UNKNOWN
|
|
|
|
++Classifier: Environment :: Web Environment
|
|
|
|
++Classifier: Intended Audience :: Developers
|
|
|
|
++Classifier: License :: OSI Approved :: MIT License
|
|
|
|
++Classifier: Operating System :: OS Independent
|
|
|
|
++Classifier: Programming Language :: Python
|
|
|
|
++Classifier: Programming Language :: Python :: 2
|
|
|
|
++Classifier: Programming Language :: Python :: 2.7
|
|
|
|
++Classifier: Programming Language :: Python :: 3
|
|
|
|
++Classifier: Programming Language :: Python :: 3.5
|
|
|
|
++Classifier: Programming Language :: Python :: 3.6
|
|
|
|
++Classifier: Programming Language :: Python :: 3.7
|
|
|
|
++Classifier: Programming Language :: Python :: 3.8
|
|
|
|
++Classifier: Programming Language :: Python :: 3.9
|
|
|
|
++Classifier: Programming Language :: Python :: Implementation :: CPython
|
|
|
|
++Classifier: Programming Language :: Python :: Implementation :: PyPy
|
|
|
|
++Classifier: Topic :: Internet :: WWW/HTTP
|
|
|
|
++Classifier: Topic :: Software Development :: Libraries
|
|
|
|
++Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4
|
|
|
|
++Description-Content-Type: text/x-rst
|
|
|
|
++Provides-Extra: brotli
|
|
|
|
++Requires-Dist: brotlipy (>=0.6.0) ; extra == 'brotli'
|
|
|
|
++Provides-Extra: secure
|
|
|
|
++Requires-Dist: pyOpenSSL (>=0.14) ; extra == 'secure'
|
|
|
|
++Requires-Dist: cryptography (>=1.3.4) ; extra == 'secure'
|
|
|
|
++Requires-Dist: idna (>=2.0.0) ; extra == 'secure'
|
|
|
|
++Requires-Dist: certifi ; extra == 'secure'
|
|
|
|
++Requires-Dist: ipaddress ; (python_version == "2.7") and extra == 'secure'
|
|
|
|
++Provides-Extra: socks
|
|
|
|
++Requires-Dist: PySocks (!=1.5.7,<2.0,>=1.5.6) ; extra == 'socks'
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++urllib3 is a powerful, *user-friendly* HTTP client for Python. Much of the
|
|
|
|
++Python ecosystem already uses urllib3 and you should too.
|
|
|
|
++urllib3 brings many critical features that are missing from the Python
|
|
|
|
++standard libraries:
|
|
|
|
++
|
|
|
|
++- Thread safety.
|
|
|
|
++- Connection pooling.
|
|
|
|
++- Client-side SSL/TLS verification.
|
|
|
|
++- File uploads with multipart encoding.
|
|
|
|
++- Helpers for retrying requests and dealing with HTTP redirects.
|
|
|
|
++- Support for gzip, deflate, and brotli encoding.
|
|
|
|
++- Proxy support for HTTP and SOCKS.
|
|
|
|
++- 100% test coverage.
|
|
|
|
++
|
|
|
|
++urllib3 is powerful and easy to use:
|
|
|
|
++
|
|
|
|
++.. code-block:: python
|
|
|
|
++
|
|
|
|
++ >>> import urllib3
|
|
|
|
++ >>> http = urllib3.PoolManager()
|
|
|
|
++ >>> r = http.request('GET', 'http://httpbin.org/robots.txt')
|
|
|
|
++ >>> r.status
|
|
|
|
++ 200
|
|
|
|
++ >>> r.data
|
|
|
|
++ 'User-agent: *\nDisallow: /deny\n'
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++Installing
|
|
|
|
++----------
|
|
|
|
++
|
|
|
|
++urllib3 can be installed with `pip <https://pip.pypa.io>`_::
|
|
|
|
++
|
|
|
|
++ $ python -m pip install urllib3
|
|
|
|
++
|
|
|
|
++Alternatively, you can grab the latest source code from `GitHub <https://github.com/urllib3/urllib3>`_::
|
|
|
|
++
|
|
|
|
++ $ git clone git://github.com/urllib3/urllib3.git
|
|
|
|
++ $ python setup.py install
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++Documentation
|
|
|
|
++-------------
|
|
|
|
++
|
|
|
|
++urllib3 has usage and reference documentation at `urllib3.readthedocs.io <https://urllib3.readthedocs.io>`_.
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++Contributing
|
|
|
|
++------------
|
|
|
|
++
|
|
|
|
++urllib3 happily accepts contributions. Please see our
|
|
|
|
++`contributing documentation <https://urllib3.readthedocs.io/en/latest/contributing.html>`_
|
|
|
|
++for some tips on getting started.
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++Security Disclosures
|
|
|
|
++--------------------
|
|
|
|
++
|
|
|
|
++To report a security vulnerability, please use the
|
|
|
|
++`Tidelift security contact <https://tidelift.com/security>`_.
|
|
|
|
++Tidelift will coordinate the fix and disclosure with maintainers.
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++Maintainers
|
|
|
|
++-----------
|
|
|
|
++
|
|
|
|
++- `@sethmlarson <https://github.com/sethmlarson>`__ (Seth M. Larson)
|
|
|
|
++- `@pquentin <https://github.com/pquentin>`__ (Quentin Pradet)
|
|
|
|
++- `@theacodes <https://github.com/theacodes>`__ (Thea Flowers)
|
|
|
|
++- `@haikuginger <https://github.com/haikuginger>`__ (Jess Shapiro)
|
|
|
|
++- `@lukasa <https://github.com/lukasa>`__ (Cory Benfield)
|
|
|
|
++- `@sigmavirus24 <https://github.com/sigmavirus24>`__ (Ian Stapleton Cordasco)
|
|
|
|
++- `@shazow <https://github.com/shazow>`__ (Andrey Petrov)
|
|
|
|
++
|
|
|
|
++👋
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++Sponsorship
|
|
|
|
++-----------
|
|
|
|
++
|
|
|
|
++If your company benefits from this library, please consider `sponsoring its
|
|
|
|
++development <https://urllib3.readthedocs.io/en/latest/sponsors.html>`_.
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++For Enterprise
|
|
|
|
++--------------
|
|
|
|
++
|
|
|
|
++.. |tideliftlogo| image:: https://nedbatchelder.com/pix/Tidelift_Logos_RGB_Tidelift_Shorthand_On-White_small.png
|
|
|
|
++ :width: 75
|
|
|
|
++ :alt: Tidelift
|
|
|
|
++
|
|
|
|
++.. list-table::
|
|
|
|
++ :widths: 10 100
|
|
|
|
++
|
|
|
|
++ * - |tideliftlogo|
|
|
|
|
++ - Professional support for urllib3 is available as part of the `Tidelift
|
|
|
|
++ Subscription`_. Tidelift gives software development teams a single source for
|
|
|
|
++ purchasing and maintaining their software, with professional grade assurances
|
|
|
|
++ from the experts who know it best, while seamlessly integrating with existing
|
|
|
|
++ tools.
|
|
|
|
++
|
|
|
|
++.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-urllib3?utm_source=pypi-urllib3&utm_medium=referral&utm_campaign=readme
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++Changes
|
|
|
|
++=======
|
|
|
|
++
|
|
|
|
++1.26.0 (2020-11-10)
|
|
|
|
++-------------------
|
|
|
|
++
|
|
|
|
++* **NOTE: urllib3 v2.0 will drop support for Python 2**.
|
|
|
|
++ `Read more in the v2.0 Roadmap <https://urllib3.readthedocs.io/en/latest/v2-roadmap.html>`_.
|
|
|
|
++
|
|
|
|
++* Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806)
|
|
|
|
++
|
|
|
|
++* Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that
|
|
|
|
++ still wish to use TLS earlier than 1.2 without a deprecation warning
|
|
|
|
++ should opt-in explicitly by setting ``ssl_version=ssl.PROTOCOL_TLSv1_1`` (Pull #2002)
|
|
|
|
++ **Starting in urllib3 v2.0: Connections that receive a ``DeprecationWarning`` will fail**
|
|
|
|
++
|
|
|
|
++* Deprecated ``Retry`` options ``Retry.DEFAULT_METHOD_WHITELIST``, ``Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST``
|
|
|
|
++ and ``Retry(method_whitelist=...)`` in favor of ``Retry.DEFAULT_ALLOWED_METHODS``,
|
|
|
|
++ ``Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT``, and ``Retry(allowed_methods=...)``
|
|
|
|
++ (Pull #2000) **Starting in urllib3 v2.0: Deprecated options will be removed**
|
|
|
|
++
|
|
|
|
++* Added default ``User-Agent`` header to every request (Pull #1750)
|
|
|
|
++
|
|
|
|
++* Added ``urllib3.util.SKIP_HEADER`` for skipping ``User-Agent``, ``Accept-Encoding``,
|
|
|
|
++ and ``Host`` headers from being automatically emitted with requests (Pull #2018)
|
|
|
|
++
|
|
|
|
++* Collapse ``transfer-encoding: chunked`` request data and framing into
|
|
|
|
++ the same ``socket.send()`` call (Pull #1906)
|
|
|
|
++
|
|
|
|
++* Send ``http/1.1`` ALPN identifier with every TLS handshake by default (Pull #1894)
|
|
|
|
++
|
|
|
|
++* Properly terminate SecureTransport connections when CA verification fails (Pull #1977)
|
|
|
|
++
|
|
|
|
++* Don't emit an ``SNIMissingWarning`` when passing ``server_hostname=None``
|
|
|
|
++ to SecureTransport (Pull #1903)
|
|
|
|
++
|
|
|
|
++* Disabled requesting TLSv1.2 session tickets as they weren't being used by urllib3 (Pull #1970)
|
|
|
|
++
|
|
|
|
++* Suppress ``BrokenPipeError`` when writing request body after the server
|
|
|
|
++ has closed the socket (Pull #1524)
|
|
|
|
++
|
|
|
|
++* Wrap ``ssl.SSLError`` that can be raised from reading a socket (e.g. "bad MAC")
|
|
|
|
++ into an ``urllib3.exceptions.SSLError`` (Pull #1939)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.25.11 (2020-10-19)
|
|
|
|
++--------------------
|
|
|
|
++
|
|
|
|
++* Fix retry backoff time parsed from ``Retry-After`` header when given
|
|
|
|
++ in the HTTP date format. The HTTP date was parsed as the local timezone
|
|
|
|
++ rather than accounting for the timezone in the HTTP date (typically
|
|
|
|
++ UTC) (Pull #1932, Pull #1935, Pull #1938, Pull #1949)
|
|
|
|
++
|
|
|
|
++* Fix issue where an error would be raised when the ``SSLKEYLOGFILE``
|
|
|
|
++ environment variable was set to the empty string. Now ``SSLContext.keylog_file``
|
|
|
|
++ is not set in this situation (Pull #2016)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.25.10 (2020-07-22)
|
|
|
|
++--------------------
|
|
|
|
++
|
|
|
|
++* Added support for ``SSLKEYLOGFILE`` environment variable for
|
|
|
|
++ logging TLS session keys with use with programs like
|
|
|
|
++ Wireshark for decrypting captured web traffic (Pull #1867)
|
|
|
|
++
|
|
|
|
++* Fixed loading of SecureTransport libraries on macOS Big Sur
|
|
|
|
++ due to the new dynamic linker cache (Pull #1905)
|
|
|
|
++
|
|
|
|
++* Collapse chunked request bodies data and framing into one
|
|
|
|
++ call to ``send()`` to reduce the number of TCP packets by 2-4x (Pull #1906)
|
|
|
|
++
|
|
|
|
++* Don't insert ``None`` into ``ConnectionPool`` if the pool
|
|
|
|
++ was empty when requesting a connection (Pull #1866)
|
|
|
|
++
|
|
|
|
++* Avoid ``hasattr`` call in ``BrotliDecoder.decompress()`` (Pull #1858)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.25.9 (2020-04-16)
|
|
|
|
++-------------------
|
|
|
|
++
|
|
|
|
++* Added ``InvalidProxyConfigurationWarning`` which is raised when
|
|
|
|
++ erroneously specifying an HTTPS proxy URL. urllib3 doesn't currently
|
|
|
|
++ support connecting to HTTPS proxies but will soon be able to
|
|
|
|
++ and we would like users to migrate properly without much breakage.
|
|
|
|
++
|
|
|
|
++ See `this GitHub issue <https://github.com/urllib3/urllib3/issues/1850>`_
|
|
|
|
++ for more information on how to fix your proxy config. (Pull #1851)
|
|
|
|
++
|
|
|
|
++* Drain connection after ``PoolManager`` redirect (Pull #1817)
|
|
|
|
++
|
|
|
|
++* Ensure ``load_verify_locations`` raises ``SSLError`` for all backends (Pull #1812)
|
|
|
|
++
|
|
|
|
++* Rename ``VerifiedHTTPSConnection`` to ``HTTPSConnection`` (Pull #1805)
|
|
|
|
++
|
|
|
|
++* Allow the CA certificate data to be passed as a string (Pull #1804)
|
|
|
|
++
|
|
|
|
++* Raise ``ValueError`` if method contains control characters (Pull #1800)
|
|
|
|
++
|
|
|
|
++* Add ``__repr__`` to ``Timeout`` (Pull #1795)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.25.8 (2020-01-20)
|
|
|
|
++-------------------
|
|
|
|
++
|
|
|
|
++* Drop support for EOL Python 3.4 (Pull #1774)
|
|
|
|
++
|
|
|
|
++* Optimize _encode_invalid_chars (Pull #1787)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.25.7 (2019-11-11)
|
|
|
|
++-------------------
|
|
|
|
++
|
|
|
|
++* Preserve ``chunked`` parameter on retries (Pull #1715, Pull #1734)
|
|
|
|
++
|
|
|
|
++* Allow unset ``SERVER_SOFTWARE`` in App Engine (Pull #1704, Issue #1470)
|
|
|
|
++
|
|
|
|
++* Fix issue where URL fragment was sent within the request target. (Pull #1732)
|
|
|
|
++
|
|
|
|
++* Fix issue where an empty query section in a URL would fail to parse. (Pull #1732)
|
|
|
|
++
|
|
|
|
++* Remove TLS 1.3 support in SecureTransport due to Apple removing support (Pull #1703)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.25.6 (2019-09-24)
|
|
|
|
++-------------------
|
|
|
|
++
|
|
|
|
++* Fix issue where tilde (``~``) characters were incorrectly
|
|
|
|
++ percent-encoded in the path. (Pull #1692)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.25.5 (2019-09-19)
|
|
|
|
++-------------------
|
|
|
|
++
|
|
|
|
++* Add mitigation for BPO-37428 affecting Python <3.7.4 and OpenSSL 1.1.1+ which
|
|
|
|
++ caused certificate verification to be enabled when using ``cert_reqs=CERT_NONE``.
|
|
|
|
++ (Issue #1682)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.25.4 (2019-09-19)
|
|
|
|
++-------------------
|
|
|
|
++
|
|
|
|
++* Propagate Retry-After header settings to subsequent retries. (Pull #1607)
|
|
|
|
++
|
|
|
|
++* Fix edge case where Retry-After header was still respected even when
|
|
|
|
++ explicitly opted out of. (Pull #1607)
|
|
|
|
++
|
|
|
|
++* Remove dependency on ``rfc3986`` for URL parsing.
|
|
|
|
++
|
|
|
|
++* Fix issue where URLs containing invalid characters within ``Url.auth`` would
|
|
|
|
++ raise an exception instead of percent-encoding those characters.
|
|
|
|
++
|
|
|
|
++* Add support for ``HTTPResponse.auto_close = False`` which makes HTTP responses
|
|
|
|
++ work well with BufferedReaders and other ``io`` module features. (Pull #1652)
|
|
|
|
++
|
|
|
|
++* Percent-encode invalid characters in URL for ``HTTPConnectionPool.request()`` (Pull #1673)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.25.3 (2019-05-23)
|
|
|
|
++-------------------
|
|
|
|
++
|
|
|
|
++* Change ``HTTPSConnection`` to load system CA certificates
|
|
|
|
++ when ``ca_certs``, ``ca_cert_dir``, and ``ssl_context`` are
|
|
|
|
++ unspecified. (Pull #1608, Issue #1603)
|
|
|
|
++
|
|
|
|
++* Upgrade bundled rfc3986 to v1.3.2. (Pull #1609, Issue #1605)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.25.2 (2019-04-28)
|
|
|
|
++-------------------
|
|
|
|
++
|
|
|
|
++* Change ``is_ipaddress`` to not detect IPvFuture addresses. (Pull #1583)
|
|
|
|
++
|
|
|
|
++* Change ``parse_url`` to percent-encode invalid characters within the
|
|
|
|
++ path, query, and target components. (Pull #1586)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.25.1 (2019-04-24)
|
|
|
|
++-------------------
|
|
|
|
++
|
|
|
|
++* Add support for Google's ``Brotli`` package. (Pull #1572, Pull #1579)
|
|
|
|
++
|
|
|
|
++* Upgrade bundled rfc3986 to v1.3.1 (Pull #1578)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.25 (2019-04-22)
|
|
|
|
++-----------------
|
|
|
|
++
|
|
|
|
++* Require and validate certificates by default when using HTTPS (Pull #1507)
|
|
|
|
++
|
|
|
|
++* Upgraded ``urllib3.utils.parse_url()`` to be RFC 3986 compliant. (Pull #1487)
|
|
|
|
++
|
|
|
|
++* Added support for ``key_password`` for ``HTTPSConnectionPool`` to use
|
|
|
|
++ encrypted ``key_file`` without creating your own ``SSLContext`` object. (Pull #1489)
|
|
|
|
++
|
|
|
|
++* Add TLSv1.3 support to CPython, pyOpenSSL, and SecureTransport ``SSLContext``
|
|
|
|
++ implementations. (Pull #1496)
|
|
|
|
++
|
|
|
|
++* Switched the default multipart header encoder from RFC 2231 to HTML 5 working draft. (Issue #303, Pull #1492)
|
|
|
|
++
|
|
|
|
++* Fixed issue where OpenSSL would block if an encrypted client private key was
|
|
|
|
++ given and no password was given. Instead an ``SSLError`` is raised. (Pull #1489)
|
|
|
|
++
|
|
|
|
++* Added support for Brotli content encoding. It is enabled automatically if
|
|
|
|
++ ``brotlipy`` package is installed which can be requested with
|
|
|
|
++ ``urllib3[brotli]`` extra. (Pull #1532)
|
|
|
|
++
|
|
|
|
++* Drop ciphers using DSS key exchange from default TLS cipher suites.
|
|
|
|
++ Improve default ciphers when using SecureTransport. (Pull #1496)
|
|
|
|
++
|
|
|
|
++* Implemented a more efficient ``HTTPResponse.__iter__()`` method. (Issue #1483)
|
|
|
|
++
|
|
|
|
++1.24.3 (2019-05-01)
|
|
|
|
++-------------------
|
|
|
|
++
|
|
|
|
++* Apply fix for CVE-2019-9740. (Pull #1591)
|
|
|
|
++
|
|
|
|
++1.24.2 (2019-04-17)
|
|
|
|
++-------------------
|
|
|
|
++
|
|
|
|
++* Don't load system certificates by default when any other ``ca_certs``, ``ca_certs_dir`` or
|
|
|
|
++ ``ssl_context`` parameters are specified.
|
|
|
|
++
|
|
|
|
++* Remove Authorization header regardless of case when redirecting to cross-site. (Issue #1510)
|
|
|
|
++
|
|
|
|
++* Add support for IPv6 addresses in subjectAltName section of certificates. (Issue #1269)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.24.1 (2018-11-02)
|
|
|
|
++-------------------
|
|
|
|
++
|
|
|
|
++* Remove quadratic behavior within ``GzipDecoder.decompress()`` (Issue #1467)
|
|
|
|
++
|
|
|
|
++* Restored functionality of ``ciphers`` parameter for ``create_urllib3_context()``. (Issue #1462)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.24 (2018-10-16)
|
|
|
|
++-----------------
|
|
|
|
++
|
|
|
|
++* Allow key_server_hostname to be specified when initializing a PoolManager to allow custom SNI to be overridden. (Pull #1449)
|
|
|
|
++
|
|
|
|
++* Test against Python 3.7 on AppVeyor. (Pull #1453)
|
|
|
|
++
|
|
|
|
++* Early-out ipv6 checks when running on App Engine. (Pull #1450)
|
|
|
|
++
|
|
|
|
++* Change ambiguous description of backoff_factor (Pull #1436)
|
|
|
|
++
|
|
|
|
++* Add ability to handle multiple Content-Encodings (Issue #1441 and Pull #1442)
|
|
|
|
++
|
|
|
|
++* Skip DNS names that can't be idna-decoded when using pyOpenSSL (Issue #1405).
|
|
|
|
++
|
|
|
|
++* Add a server_hostname parameter to HTTPSConnection which allows for
|
|
|
|
++ overriding the SNI hostname sent in the handshake. (Pull #1397)
|
|
|
|
++
|
|
|
|
++* Drop support for EOL Python 2.6 (Pull #1429 and Pull #1430)
|
|
|
|
++
|
|
|
|
++* Fixed bug where responses with header Content-Type: message/* erroneously
|
|
|
|
++ raised HeaderParsingError, resulting in a warning being logged. (Pull #1439)
|
|
|
|
++
|
|
|
|
++* Move urllib3 to src/urllib3 (Pull #1409)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.23 (2018-06-04)
|
|
|
|
++-----------------
|
|
|
|
++
|
|
|
|
++* Allow providing a list of headers to strip from requests when redirecting
|
|
|
|
++ to a different host. Defaults to the ``Authorization`` header. Different
|
|
|
|
++ headers can be set via ``Retry.remove_headers_on_redirect``. (Issue #1316)
|
|
|
|
++
|
|
|
|
++* Fix ``util.selectors._fileobj_to_fd`` to accept ``long`` (Issue #1247).
|
|
|
|
++
|
|
|
|
++* Dropped Python 3.3 support. (Pull #1242)
|
|
|
|
++
|
|
|
|
++* Put the connection back in the pool when calling stream() or read_chunked() on
|
|
|
|
++ a chunked HEAD response. (Issue #1234)
|
|
|
|
++
|
|
|
|
++* Fixed pyOpenSSL-specific ssl client authentication issue when clients
|
|
|
|
++ attempted to auth via certificate + chain (Issue #1060)
|
|
|
|
++
|
|
|
|
++* Add the port to the connectionpool connect print (Pull #1251)
|
|
|
|
++
|
|
|
|
++* Don't use the ``uuid`` module to create multipart data boundaries. (Pull #1380)
|
|
|
|
++
|
|
|
|
++* ``read_chunked()`` on a closed response returns no chunks. (Issue #1088)
|
|
|
|
++
|
|
|
|
++* Add Python 2.6 support to ``contrib.securetransport`` (Pull #1359)
|
|
|
|
++
|
|
|
|
++* Added support for auth info in url for SOCKS proxy (Pull #1363)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.22 (2017-07-20)
|
|
|
|
++-----------------
|
|
|
|
++
|
|
|
|
++* Fixed missing brackets in ``HTTP CONNECT`` when connecting to IPv6 address via
|
|
|
|
++ IPv6 proxy. (Issue #1222)
|
|
|
|
++
|
|
|
|
++* Made the connection pool retry on ``SSLError``. The original ``SSLError``
|
|
|
|
++ is available on ``MaxRetryError.reason``. (Issue #1112)
|
|
|
|
++
|
|
|
|
++* Drain and release connection before recursing on retry/redirect. Fixes
|
|
|
|
++ deadlocks with a blocking connectionpool. (Issue #1167)
|
|
|
|
++
|
|
|
|
++* Fixed compatibility for cookiejar. (Issue #1229)
|
|
|
|
++
|
|
|
|
++* pyopenssl: Use vendored version of ``six``. (Issue #1231)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.21.1 (2017-05-02)
|
|
|
|
++-------------------
|
|
|
|
++
|
|
|
|
++* Fixed SecureTransport issue that would cause long delays in response body
|
|
|
|
++ delivery. (Pull #1154)
|
|
|
|
++
|
|
|
|
++* Fixed regression in 1.21 that threw exceptions when users passed the
|
|
|
|
++ ``socket_options`` flag to the ``PoolManager``. (Issue #1165)
|
|
|
|
++
|
|
|
|
++* Fixed regression in 1.21 that threw exceptions when users passed the
|
|
|
|
++ ``assert_hostname`` or ``assert_fingerprint`` flag to the ``PoolManager``.
|
|
|
|
++ (Pull #1157)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.21 (2017-04-25)
|
|
|
|
++-----------------
|
|
|
|
++
|
|
|
|
++* Improved performance of certain selector system calls on Python 3.5 and
|
|
|
|
++ later. (Pull #1095)
|
|
|
|
++
|
|
|
|
++* Resolved issue where the PyOpenSSL backend would not wrap SysCallError
|
|
|
|
++ exceptions appropriately when sending data. (Pull #1125)
|
|
|
|
++
|
|
|
|
++* Selectors now detects a monkey-patched select module after import for modules
|
|
|
|
++ that patch the select module like eventlet, greenlet. (Pull #1128)
|
|
|
|
++
|
|
|
|
++* Reduced memory consumption when streaming zlib-compressed responses
|
|
|
|
++ (as opposed to raw deflate streams). (Pull #1129)
|
|
|
|
++
|
|
|
|
++* Connection pools now use the entire request context when constructing the
|
|
|
|
++ pool key. (Pull #1016)
|
|
|
|
++
|
|
|
|
++* ``PoolManager.connection_from_*`` methods now accept a new keyword argument,
|
|
|
|
++ ``pool_kwargs``, which are merged with the existing ``connection_pool_kw``.
|
|
|
|
++ (Pull #1016)
|
|
|
|
++
|
|
|
|
++* Add retry counter for ``status_forcelist``. (Issue #1147)
|
|
|
|
++
|
|
|
|
++* Added ``contrib`` module for using SecureTransport on macOS:
|
|
|
|
++ ``urllib3.contrib.securetransport``. (Pull #1122)
|
|
|
|
++
|
|
|
|
++* urllib3 now only normalizes the case of ``http://`` and ``https://`` schemes:
|
|
|
|
++ for schemes it does not recognise, it assumes they are case-sensitive and
|
|
|
|
++ leaves them unchanged.
|
|
|
|
++ (Issue #1080)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.20 (2017-01-19)
|
|
|
|
++-----------------
|
|
|
|
++
|
|
|
|
++* Added support for waiting for I/O using selectors other than select,
|
|
|
|
++ improving urllib3's behaviour with large numbers of concurrent connections.
|
|
|
|
++ (Pull #1001)
|
|
|
|
++
|
|
|
|
++* Updated the date for the system clock check. (Issue #1005)
|
|
|
|
++
|
|
|
|
++* ConnectionPools now correctly consider hostnames to be case-insensitive.
|
|
|
|
++ (Issue #1032)
|
|
|
|
++
|
|
|
|
++* Outdated versions of PyOpenSSL now cause the PyOpenSSL contrib module
|
|
|
|
++ to fail when it is injected, rather than at first use. (Pull #1063)
|
|
|
|
++
|
|
|
|
++* Outdated versions of cryptography now cause the PyOpenSSL contrib module
|
|
|
|
++ to fail when it is injected, rather than at first use. (Issue #1044)
|
|
|
|
++
|
|
|
|
++* Automatically attempt to rewind a file-like body object when a request is
|
|
|
|
++ retried or redirected. (Pull #1039)
|
|
|
|
++
|
|
|
|
++* Fix some bugs that occur when modules incautiously patch the queue module.
|
|
|
|
++ (Pull #1061)
|
|
|
|
++
|
|
|
|
++* Prevent retries from occurring on read timeouts for which the request method
|
|
|
|
++ was not in the method whitelist. (Issue #1059)
|
|
|
|
++
|
|
|
|
++* Changed the PyOpenSSL contrib module to lazily load idna to avoid
|
|
|
|
++ unnecessarily bloating the memory of programs that don't need it. (Pull
|
|
|
|
++ #1076)
|
|
|
|
++
|
|
|
|
++* Add support for IPv6 literals with zone identifiers. (Pull #1013)
|
|
|
|
++
|
|
|
|
++* Added support for socks5h:// and socks4a:// schemes when working with SOCKS
|
|
|
|
++ proxies, and controlled remote DNS appropriately. (Issue #1035)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.19.1 (2016-11-16)
|
|
|
|
++-------------------
|
|
|
|
++
|
|
|
|
++* Fixed AppEngine import that didn't function on Python 3.5. (Pull #1025)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.19 (2016-11-03)
|
|
|
|
++-----------------
|
|
|
|
++
|
|
|
|
++* urllib3 now respects Retry-After headers on 413, 429, and 503 responses when
|
|
|
|
++ using the default retry logic. (Pull #955)
|
|
|
|
++
|
|
|
|
++* Remove markers from setup.py to assist ancient setuptools versions. (Issue
|
|
|
|
++ #986)
|
|
|
|
++
|
|
|
|
++* Disallow superscripts and other integerish things in URL ports. (Issue #989)
|
|
|
|
++
|
|
|
|
++* Allow urllib3's HTTPResponse.stream() method to continue to work with
|
|
|
|
++ non-httplib underlying FPs. (Pull #990)
|
|
|
|
++
|
|
|
|
++* Empty filenames in multipart headers are now emitted as such, rather than
|
|
|
|
++ being suppressed. (Issue #1015)
|
|
|
|
++
|
|
|
|
++* Prefer user-supplied Host headers on chunked uploads. (Issue #1009)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.18.1 (2016-10-27)
|
|
|
|
++-------------------
|
|
|
|
++
|
|
|
|
++* CVE-2016-9015. Users who are using urllib3 version 1.17 or 1.18 along with
|
|
|
|
++ PyOpenSSL injection and OpenSSL 1.1.0 *must* upgrade to this version. This
|
|
|
|
++ release fixes a vulnerability whereby urllib3 in the above configuration
|
|
|
|
++ would silently fail to validate TLS certificates due to erroneously setting
|
|
|
|
++ invalid flags in OpenSSL's ``SSL_CTX_set_verify`` function. These erroneous
|
|
|
|
++ flags do not cause a problem in OpenSSL versions before 1.1.0, which
|
|
|
|
++ interprets the presence of any flag as requesting certificate validation.
|
|
|
|
++
|
|
|
|
++ There is no PR for this patch, as it was prepared for simultaneous disclosure
|
|
|
|
++ and release. The master branch received the same fix in Pull #1010.
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.18 (2016-09-26)
|
|
|
|
++-----------------
|
|
|
|
++
|
|
|
|
++* Fixed incorrect message for IncompleteRead exception. (Pull #973)
|
|
|
|
++
|
|
|
|
++* Accept ``iPAddress`` subject alternative name fields in TLS certificates.
|
|
|
|
++ (Issue #258)
|
|
|
|
++
|
|
|
|
++* Fixed consistency of ``HTTPResponse.closed`` between Python 2 and 3.
|
|
|
|
++ (Issue #977)
|
|
|
|
++
|
|
|
|
++* Fixed handling of wildcard certificates when using PyOpenSSL. (Issue #979)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.17 (2016-09-06)
|
|
|
|
++-----------------
|
|
|
|
++
|
|
|
|
++* Accept ``SSLContext`` objects for use in SSL/TLS negotiation. (Issue #835)
|
|
|
|
++
|
|
|
|
++* ConnectionPool debug log now includes scheme, host, and port. (Issue #897)
|
|
|
|
++
|
|
|
|
++* Substantially refactored documentation. (Issue #887)
|
|
|
|
++
|
|
|
|
++* Used URLFetch default timeout on AppEngine, rather than hardcoding our own.
|
|
|
|
++ (Issue #858)
|
|
|
|
++
|
|
|
|
++* Normalize the scheme and host in the URL parser (Issue #833)
|
|
|
|
++
|
|
|
|
++* ``HTTPResponse`` contains the last ``Retry`` object, which now also
|
|
|
|
++ contains retries history. (Issue #848)
|
|
|
|
++
|
|
|
|
++* Timeout can no longer be set as boolean, and must be greater than zero.
|
|
|
|
++ (Pull #924)
|
|
|
|
++
|
|
|
|
++* Removed pyasn1 and ndg-httpsclient from dependencies used for PyOpenSSL. We
|
|
|
|
++ now use cryptography and idna, both of which are already dependencies of
|
|
|
|
++ PyOpenSSL. (Pull #930)
|
|
|
|
++
|
|
|
|
++* Fixed infinite loop in ``stream`` when amt=None. (Issue #928)
|
|
|
|
++
|
|
|
|
++* Try to use the operating system's certificates when we are using an
|
|
|
|
++ ``SSLContext``. (Pull #941)
|
|
|
|
++
|
|
|
|
++* Updated cipher suite list to allow ChaCha20+Poly1305. AES-GCM is preferred to
|
|
|
|
++ ChaCha20, but ChaCha20 is then preferred to everything else. (Pull #947)
|
|
|
|
++
|
|
|
|
++* Updated cipher suite list to remove 3DES-based cipher suites. (Pull #958)
|
|
|
|
++
|
|
|
|
++* Removed the cipher suite fallback to allow HIGH ciphers. (Pull #958)
|
|
|
|
++
|
|
|
|
++* Implemented ``length_remaining`` to determine remaining content
|
|
|
|
++ to be read. (Pull #949)
|
|
|
|
++
|
|
|
|
++* Implemented ``enforce_content_length`` to enable exceptions when
|
|
|
|
++ incomplete data chunks are received. (Pull #949)
|
|
|
|
++
|
|
|
|
++* Dropped connection start, dropped connection reset, redirect, forced retry,
|
|
|
|
++ and new HTTPS connection log levels to DEBUG, from INFO. (Pull #967)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.16 (2016-06-11)
|
|
|
|
++-----------------
|
|
|
|
++
|
|
|
|
++* Disable IPv6 DNS when IPv6 connections are not possible. (Issue #840)
|
|
|
|
++
|
|
|
|
++* Provide ``key_fn_by_scheme`` pool keying mechanism that can be
|
|
|
|
++ overridden. (Issue #830)
|
|
|
|
++
|
|
|
|
++* Normalize scheme and host to lowercase for pool keys, and include
|
|
|
|
++ ``source_address``. (Issue #830)
|
|
|
|
++
|
|
|
|
++* Cleaner exception chain in Python 3 for ``_make_request``.
|
|
|
|
++ (Issue #861)
|
|
|
|
++
|
|
|
|
++* Fixed installing ``urllib3[socks]`` extra. (Issue #864)
|
|
|
|
++
|
|
|
|
++* Fixed signature of ``ConnectionPool.close`` so it can actually safely be
|
|
|
|
++ called by subclasses. (Issue #873)
|
|
|
|
++
|
|
|
|
++* Retain ``release_conn`` state across retries. (Issues #651, #866)
|
|
|
|
++
|
|
|
|
++* Add customizable ``HTTPConnectionPool.ResponseCls``, which defaults to
|
|
|
|
++ ``HTTPResponse`` but can be replaced with a subclass. (Issue #879)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.15.1 (2016-04-11)
|
|
|
|
++-------------------
|
|
|
|
++
|
|
|
|
++* Fix packaging to include backports module. (Issue #841)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.15 (2016-04-06)
|
|
|
|
++-----------------
|
|
|
|
++
|
|
|
|
++* Added Retry(raise_on_status=False). (Issue #720)
|
|
|
|
++
|
|
|
|
++* Always use setuptools, no more distutils fallback. (Issue #785)
|
|
|
|
++
|
|
|
|
++* Dropped support for Python 3.2. (Issue #786)
|
|
|
|
++
|
|
|
|
++* Chunked transfer encoding when requesting with ``chunked=True``.
|
|
|
|
++ (Issue #790)
|
|
|
|
++
|
|
|
|
++* Fixed regression with IPv6 port parsing. (Issue #801)
|
|
|
|
++
|
|
|
|
++* Append SNIMissingWarning messages to allow users to specify it in
|
|
|
|
++ the PYTHONWARNINGS environment variable. (Issue #816)
|
|
|
|
++
|
|
|
|
++* Handle unicode headers in Py2. (Issue #818)
|
|
|
|
++
|
|
|
|
++* Log certificate when there is a hostname mismatch. (Issue #820)
|
|
|
|
++
|
|
|
|
++* Preserve order of request/response headers. (Issue #821)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.14 (2015-12-29)
|
|
|
|
++-----------------
|
|
|
|
++
|
|
|
|
++* contrib: SOCKS proxy support! (Issue #762)
|
|
|
|
++
|
|
|
|
++* Fixed AppEngine handling of transfer-encoding header and bug
|
|
|
|
++ in Timeout defaults checking. (Issue #763)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.13.1 (2015-12-18)
|
|
|
|
++-------------------
|
|
|
|
++
|
|
|
|
++* Fixed regression in IPv6 + SSL for match_hostname. (Issue #761)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.13 (2015-12-14)
|
|
|
|
++-----------------
|
|
|
|
++
|
|
|
|
++* Fixed ``pip install urllib3[secure]`` on modern pip. (Issue #706)
|
|
|
|
++
|
|
|
|
++* pyopenssl: Fixed SSL3_WRITE_PENDING error. (Issue #717)
|
|
|
|
++
|
|
|
|
++* pyopenssl: Support for TLSv1.1 and TLSv1.2. (Issue #696)
|
|
|
|
++
|
|
|
|
++* Close connections more defensively on exception. (Issue #734)
|
|
|
|
++
|
|
|
|
++* Adjusted ``read_chunked`` to handle gzipped, chunk-encoded bodies without
|
|
|
|
++ repeatedly flushing the decoder, to function better on Jython. (Issue #743)
|
|
|
|
++
|
|
|
|
++* Accept ``ca_cert_dir`` for SSL-related PoolManager configuration. (Issue #758)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.12 (2015-09-03)
|
|
|
|
++-----------------
|
|
|
|
++
|
|
|
|
++* Rely on ``six`` for importing ``httplib`` to work around
|
|
|
|
++ conflicts with other Python 3 shims. (Issue #688)
|
|
|
|
++
|
|
|
|
++* Add support for directories of certificate authorities, as supported by
|
|
|
|
++ OpenSSL. (Issue #701)
|
|
|
|
++
|
|
|
|
++* New exception: ``NewConnectionError``, raised when we fail to establish
|
|
|
|
++ a new connection, usually ``ECONNREFUSED`` socket error.
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.11 (2015-07-21)
|
|
|
|
++-----------------
|
|
|
|
++
|
|
|
|
++* When ``ca_certs`` is given, ``cert_reqs`` defaults to
|
|
|
|
++ ``'CERT_REQUIRED'``. (Issue #650)
|
|
|
|
++
|
|
|
|
++* ``pip install urllib3[secure]`` will install Certifi and
|
|
|
|
++ PyOpenSSL as dependencies. (Issue #678)
|
|
|
|
++
|
|
|
|
++* Made ``HTTPHeaderDict`` usable as a ``headers`` input value
|
|
|
|
++ (Issues #632, #679)
|
|
|
|
++
|
|
|
|
++* Added `urllib3.contrib.appengine <https://urllib3.readthedocs.io/en/latest/contrib.html#google-app-engine>`_
|
|
|
|
++ which has an ``AppEngineManager`` for using ``URLFetch`` in a
|
|
|
|
++ Google AppEngine environment. (Issue #664)
|
|
|
|
++
|
|
|
|
++* Dev: Added test suite for AppEngine. (Issue #631)
|
|
|
|
++
|
|
|
|
++* Fix performance regression when using PyOpenSSL. (Issue #626)
|
|
|
|
++
|
|
|
|
++* Passing incorrect scheme (e.g. ``foo://``) will raise
|
|
|
|
++ ``ValueError`` instead of ``AssertionError`` (backwards
|
|
|
|
++ compatible for now, but please migrate). (Issue #640)
|
|
|
|
++
|
|
|
|
++* Fix pools not getting replenished when an error occurs during a
|
|
|
|
++ request using ``release_conn=False``. (Issue #644)
|
|
|
|
++
|
|
|
|
++* Fix pool-default headers not applying for url-encoded requests
|
|
|
|
++ like GET. (Issue #657)
|
|
|
|
++
|
|
|
|
++* log.warning in Python 3 when headers are skipped due to parsing
|
|
|
|
++ errors. (Issue #642)
|
|
|
|
++
|
|
|
|
++* Close and discard connections if an error occurs during read.
|
|
|
|
++ (Issue #660)
|
|
|
|
++
|
|
|
|
++* Fix host parsing for IPv6 proxies. (Issue #668)
|
|
|
|
++
|
|
|
|
++* Separate warning type SubjectAltNameWarning, now issued once
|
|
|
|
++ per host. (Issue #671)
|
|
|
|
++
|
|
|
|
++* Fix ``httplib.IncompleteRead`` not getting converted to
|
|
|
|
++ ``ProtocolError`` when using ``HTTPResponse.stream()``
|
|
|
|
++ (Issue #674)
|
|
|
|
++
|
|
|
|
++1.10.4 (2015-05-03)
|
|
|
|
++-------------------
|
|
|
|
++
|
|
|
|
++* Migrate tests to Tornado 4. (Issue #594)
|
|
|
|
++
|
|
|
|
++* Append default warning configuration rather than overwrite.
|
|
|
|
++ (Issue #603)
|
|
|
|
++
|
|
|
|
++* Fix streaming decoding regression. (Issue #595)
|
|
|
|
++
|
|
|
|
++* Fix chunked requests losing state across keep-alive connections.
|
|
|
|
++ (Issue #599)
|
|
|
|
++
|
|
|
|
++* Fix hanging when chunked HEAD response has no body. (Issue #605)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.10.3 (2015-04-21)
|
|
|
|
++-------------------
|
|
|
|
++
|
|
|
|
++* Emit ``InsecurePlatformWarning`` when SSLContext object is missing.
|
|
|
|
++ (Issue #558)
|
|
|
|
++
|
|
|
|
++* Fix regression of duplicate header keys being discarded.
|
|
|
|
++ (Issue #563)
|
|
|
|
++
|
|
|
|
++* ``Response.stream()`` returns a generator for chunked responses.
|
|
|
|
++ (Issue #560)
|
|
|
|
++
|
|
|
|
++* Set upper-bound timeout when waiting for a socket in PyOpenSSL.
|
|
|
|
++ (Issue #585)
|
|
|
|
++
|
|
|
|
++* Work on platforms without `ssl` module for plain HTTP requests.
|
|
|
|
++ (Issue #587)
|
|
|
|
++
|
|
|
|
++* Stop relying on the stdlib's default cipher list. (Issue #588)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.10.2 (2015-02-25)
|
|
|
|
++-------------------
|
|
|
|
++
|
|
|
|
++* Fix file descriptor leakage on retries. (Issue #548)
|
|
|
|
++
|
|
|
|
++* Removed RC4 from default cipher list. (Issue #551)
|
|
|
|
++
|
|
|
|
++* Header performance improvements. (Issue #544)
|
|
|
|
++
|
|
|
|
++* Fix PoolManager not obeying redirect retry settings. (Issue #553)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.10.1 (2015-02-10)
|
|
|
|
++-------------------
|
|
|
|
++
|
|
|
|
++* Pools can be used as context managers. (Issue #545)
|
|
|
|
++
|
|
|
|
++* Don't re-use connections which experienced an SSLError. (Issue #529)
|
|
|
|
++
|
|
|
|
++* Don't fail when gzip decoding an empty stream. (Issue #535)
|
|
|
|
++
|
|
|
|
++* Add sha256 support for fingerprint verification. (Issue #540)
|
|
|
|
++
|
|
|
|
++* Fixed handling of header values containing commas. (Issue #533)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.10 (2014-12-14)
|
|
|
|
++-----------------
|
|
|
|
++
|
|
|
|
++* Disabled SSLv3. (Issue #473)
|
|
|
|
++
|
|
|
|
++* Add ``Url.url`` property to return the composed url string. (Issue #394)
|
|
|
|
++
|
|
|
|
++* Fixed PyOpenSSL + gevent ``WantWriteError``. (Issue #412)
|
|
|
|
++
|
|
|
|
++* ``MaxRetryError.reason`` will always be an exception, not string.
|
|
|
|
++ (Issue #481)
|
|
|
|
++
|
|
|
|
++* Fixed SSL-related timeouts not being detected as timeouts. (Issue #492)
|
|
|
|
++
|
|
|
|
++* Py3: Use ``ssl.create_default_context()`` when available. (Issue #473)
|
|
|
|
++
|
|
|
|
++* Emit ``InsecureRequestWarning`` for *every* insecure HTTPS request.
|
|
|
|
++ (Issue #496)
|
|
|
|
++
|
|
|
|
++* Emit ``SecurityWarning`` when certificate has no ``subjectAltName``.
|
|
|
|
++ (Issue #499)
|
|
|
|
++
|
|
|
|
++* Close and discard sockets which experienced SSL-related errors.
|
|
|
|
++ (Issue #501)
|
|
|
|
++
|
|
|
|
++* Handle ``body`` param in ``.request(...)``. (Issue #513)
|
|
|
|
++
|
|
|
|
++* Respect timeout with HTTPS proxy. (Issue #505)
|
|
|
|
++
|
|
|
|
++* PyOpenSSL: Handle ZeroReturnError exception. (Issue #520)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.9.1 (2014-09-13)
|
|
|
|
++------------------
|
|
|
|
++
|
|
|
|
++* Apply socket arguments before binding. (Issue #427)
|
|
|
|
++
|
|
|
|
++* More careful checks if fp-like object is closed. (Issue #435)
|
|
|
|
++
|
|
|
|
++* Fixed packaging issues of some development-related files not
|
|
|
|
++ getting included. (Issue #440)
|
|
|
|
++
|
|
|
|
++* Allow performing *only* fingerprint verification. (Issue #444)
|
|
|
|
++
|
|
|
|
++* Emit ``SecurityWarning`` if system clock is waaay off. (Issue #445)
|
|
|
|
++
|
|
|
|
++* Fixed PyOpenSSL compatibility with PyPy. (Issue #450)
|
|
|
|
++
|
|
|
|
++* Fixed ``BrokenPipeError`` and ``ConnectionError`` handling in Py3.
|
|
|
|
++ (Issue #443)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.9 (2014-07-04)
|
|
|
|
++----------------
|
|
|
|
++
|
|
|
|
++* Shuffled around development-related files. If you're maintaining a distro
|
|
|
|
++ package of urllib3, you may need to tweak things. (Issue #415)
|
|
|
|
++
|
|
|
|
++* Unverified HTTPS requests will trigger a warning on the first request. See
|
|
|
|
++ our new `security documentation
|
|
|
|
++ <https://urllib3.readthedocs.io/en/latest/security.html>`_ for details.
|
|
|
|
++ (Issue #426)
|
|
|
|
++
|
|
|
|
++* New retry logic and ``urllib3.util.retry.Retry`` configuration object.
|
|
|
|
++ (Issue #326)
|
|
|
|
++
|
|
|
|
++* All raised exceptions should now be wrapped in a
|
|
|
|
++ ``urllib3.exceptions.HTTPException``-extending exception. (Issue #326)
|
|
|
|
++
|
|
|
|
++* All errors during a retry-enabled request should be wrapped in
|
|
|
|
++ ``urllib3.exceptions.MaxRetryError``, including timeout-related exceptions
|
|
|
|
++ which were previously exempt. Underlying error is accessible from the
|
|
|
|
++ ``.reason`` property. (Issue #326)
|
|
|
|
++
|
|
|
|
++* ``urllib3.exceptions.ConnectionError`` renamed to
|
|
|
|
++ ``urllib3.exceptions.ProtocolError``. (Issue #326)
|
|
|
|
++
|
|
|
|
++* Errors during response read (such as IncompleteRead) are now wrapped in
|
|
|
|
++ ``urllib3.exceptions.ProtocolError``. (Issue #418)
|
|
|
|
++
|
|
|
|
++* Requesting an empty host will raise ``urllib3.exceptions.LocationValueError``.
|
|
|
|
++ (Issue #417)
|
|
|
|
++
|
|
|
|
++* Catch read timeouts over SSL connections as
|
|
|
|
++ ``urllib3.exceptions.ReadTimeoutError``. (Issue #419)
|
|
|
|
++
|
|
|
|
++* Apply socket arguments before connecting. (Issue #427)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.8.3 (2014-06-23)
|
|
|
|
++------------------
|
|
|
|
++
|
|
|
|
++* Fix TLS verification when using a proxy in Python 3.4.1. (Issue #385)
|
|
|
|
++
|
|
|
|
++* Add ``disable_cache`` option to ``urllib3.util.make_headers``. (Issue #393)
|
|
|
|
++
|
|
|
|
++* Wrap ``socket.timeout`` exception with
|
|
|
|
++ ``urllib3.exceptions.ReadTimeoutError``. (Issue #399)
|
|
|
|
++
|
|
|
|
++* Fixed proxy-related bug where connections were being reused incorrectly.
|
|
|
|
++ (Issues #366, #369)
|
|
|
|
++
|
|
|
|
++* Added ``socket_options`` keyword parameter which allows to define
|
|
|
|
++ ``setsockopt`` configuration of new sockets. (Issue #397)
|
|
|
|
++
|
|
|
|
++* Removed ``HTTPConnection.tcp_nodelay`` in favor of
|
|
|
|
++ ``HTTPConnection.default_socket_options``. (Issue #397)
|
|
|
|
++
|
|
|
|
++* Fixed ``TypeError`` bug in Python 2.6.4. (Issue #411)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.8.2 (2014-04-17)
|
|
|
|
++------------------
|
|
|
|
++
|
|
|
|
++* Fix ``urllib3.util`` not being included in the package.
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.8.1 (2014-04-17)
|
|
|
|
++------------------
|
|
|
|
++
|
|
|
|
++* Fix AppEngine bug of HTTPS requests going out as HTTP. (Issue #356)
|
|
|
|
++
|
|
|
|
++* Don't install ``dummyserver`` into ``site-packages`` as it's only needed
|
|
|
|
++ for the test suite. (Issue #362)
|
|
|
|
++
|
|
|
|
++* Added support for specifying ``source_address``. (Issue #352)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.8 (2014-03-04)
|
|
|
|
++----------------
|
|
|
|
++
|
|
|
|
++* Improved url parsing in ``urllib3.util.parse_url`` (properly parse '@' in
|
|
|
|
++ username, and blank ports like 'hostname:').
|
|
|
|
++
|
|
|
|
++* New ``urllib3.connection`` module which contains all the HTTPConnection
|
|
|
|
++ objects.
|
|
|
|
++
|
|
|
|
++* Several ``urllib3.util.Timeout``-related fixes. Also changed constructor
|
|
|
|
++ signature to a more sensible order. [Backwards incompatible]
|
|
|
|
++ (Issues #252, #262, #263)
|
|
|
|
++
|
|
|
|
++* Use ``backports.ssl_match_hostname`` if it's installed. (Issue #274)
|
|
|
|
++
|
|
|
|
++* Added ``.tell()`` method to ``urllib3.response.HTTPResponse`` which
|
|
|
|
++ returns the number of bytes read so far. (Issue #277)
|
|
|
|
++
|
|
|
|
++* Support for platforms without threading. (Issue #289)
|
|
|
|
++
|
|
|
|
++* Expand default-port comparison in ``HTTPConnectionPool.is_same_host``
|
|
|
|
++  to allow a pool with no specified port to be considered equal to an
|
|
|
|
++ HTTP/HTTPS url with port 80/443 explicitly provided. (Issue #305)
|
|
|
|
++
|
|
|
|
++* Improved default SSL/TLS settings to avoid vulnerabilities.
|
|
|
|
++ (Issue #309)
|
|
|
|
++
|
|
|
|
++* Fixed ``urllib3.poolmanager.ProxyManager`` not retrying on connect errors.
|
|
|
|
++ (Issue #310)
|
|
|
|
++
|
|
|
|
++* Disable Nagle's Algorithm on the socket for non-proxies. A subset of requests
|
|
|
|
++ will send the entire HTTP request ~200 milliseconds faster; however, some of
|
|
|
|
++ the resulting TCP packets will be smaller. (Issue #254)
|
|
|
|
++
|
|
|
|
++* Increased maximum number of SubjectAltNames in ``urllib3.contrib.pyopenssl``
|
|
|
|
++ from the default 64 to 1024 in a single certificate. (Issue #318)
|
|
|
|
++
|
|
|
|
++* Headers are now passed and stored as a custom
|
|
|
|
++ ``urllib3.collections_.HTTPHeaderDict`` object rather than a plain ``dict``.
|
|
|
|
++ (Issue #329, #333)
|
|
|
|
++
|
|
|
|
++* Headers no longer lose their case on Python 3. (Issue #236)
|
|
|
|
++
|
|
|
|
++* ``urllib3.contrib.pyopenssl`` now uses the operating system's default CA
|
|
|
|
++ certificates on inject. (Issue #332)
|
|
|
|
++
|
|
|
|
++* Requests with ``retries=False`` will immediately raise any exceptions without
|
|
|
|
++ wrapping them in ``MaxRetryError``. (Issue #348)
|
|
|
|
++
|
|
|
|
++* Fixed open socket leak with SSL-related failures. (Issue #344, #348)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.7.1 (2013-09-25)
|
|
|
|
++------------------
|
|
|
|
++
|
|
|
|
++* Added granular timeout support with new ``urllib3.util.Timeout`` class.
|
|
|
|
++ (Issue #231)
|
|
|
|
++
|
|
|
|
++* Fixed Python 3.4 support. (Issue #238)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.7 (2013-08-14)
|
|
|
|
++----------------
|
|
|
|
++
|
|
|
|
++* More exceptions are now pickle-able, with tests. (Issue #174)
|
|
|
|
++
|
|
|
|
++* Fixed redirecting with relative URLs in Location header. (Issue #178)
|
|
|
|
++
|
|
|
|
++* Support for relative urls in ``Location: ...`` header. (Issue #179)
|
|
|
|
++
|
|
|
|
++* ``urllib3.response.HTTPResponse`` now inherits from ``io.IOBase`` for bonus
|
|
|
|
++ file-like functionality. (Issue #187)
|
|
|
|
++
|
|
|
|
++* Passing ``assert_hostname=False`` when creating a HTTPSConnectionPool will
|
|
|
|
++ skip hostname verification for SSL connections. (Issue #194)
|
|
|
|
++
|
|
|
|
++* New method ``urllib3.response.HTTPResponse.stream(...)`` which acts as a
|
|
|
|
++ generator wrapped around ``.read(...)``. (Issue #198)
|
|
|
|
++
|
|
|
|
++* IPv6 url parsing enforces brackets around the hostname. (Issue #199)
|
|
|
|
++
|
|
|
|
++* Fixed thread race condition in
|
|
|
|
++ ``urllib3.poolmanager.PoolManager.connection_from_host(...)`` (Issue #204)
|
|
|
|
++
|
|
|
|
++* ``ProxyManager`` requests now include non-default port in ``Host: ...``
|
|
|
|
++ header. (Issue #217)
|
|
|
|
++
|
|
|
|
++* Added HTTPS proxy support in ``ProxyManager``. (Issue #170 #139)
|
|
|
|
++
|
|
|
|
++* New ``RequestField`` object can be passed to the ``fields=...`` param which
|
|
|
|
++ can specify headers. (Issue #220)
|
|
|
|
++
|
|
|
|
++* Raise ``urllib3.exceptions.ProxyError`` when connecting to proxy fails.
|
|
|
|
++ (Issue #221)
|
|
|
|
++
|
|
|
|
++* Use international headers when posting file names. (Issue #119)
|
|
|
|
++
|
|
|
|
++* Improved IPv6 support. (Issue #203)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.6 (2013-04-25)
|
|
|
|
++----------------
|
|
|
|
++
|
|
|
|
++* Contrib: Optional SNI support for Py2 using PyOpenSSL. (Issue #156)
|
|
|
|
++
|
|
|
|
++* ``ProxyManager`` automatically adds ``Host: ...`` header if not given.
|
|
|
|
++
|
|
|
|
++* Improved SSL-related code. ``cert_req`` now optionally takes a string like
|
|
|
|
++ "REQUIRED" or "NONE". Same with ``ssl_version`` takes strings like "SSLv23"
|
|
|
|
++ The string values reflect the suffix of the respective constant variable.
|
|
|
|
++ (Issue #130)
|
|
|
|
++
|
|
|
|
++* Vendored ``socksipy`` now based on Anorov's fork which handles unexpectedly
|
|
|
|
++ closed proxy connections and larger read buffers. (Issue #135)
|
|
|
|
++
|
|
|
|
++* Ensure the connection is closed if no data is received, fixes connection leak
|
|
|
|
++ on some platforms. (Issue #133)
|
|
|
|
++
|
|
|
|
++* Added SNI support for SSL/TLS connections on Py32+. (Issue #89)
|
|
|
|
++
|
|
|
|
++* Tests fixed to be compatible with Py26 again. (Issue #125)
|
|
|
|
++
|
|
|
|
++* Added ability to choose SSL version by passing an ``ssl.PROTOCOL_*`` constant
|
|
|
|
++ to the ``ssl_version`` parameter of ``HTTPSConnectionPool``. (Issue #109)
|
|
|
|
++
|
|
|
|
++* Allow an explicit content type to be specified when encoding file fields.
|
|
|
|
++ (Issue #126)
|
|
|
|
++
|
|
|
|
++* Exceptions are now pickleable, with tests. (Issue #101)
|
|
|
|
++
|
|
|
|
++* Fixed default headers not getting passed in some cases. (Issue #99)
|
|
|
|
++
|
|
|
|
++* Treat "content-encoding" header value as case-insensitive, per RFC 2616
|
|
|
|
++ Section 3.5. (Issue #110)
|
|
|
|
++
|
|
|
|
++* "Connection Refused" SocketErrors will get retried rather than raised.
|
|
|
|
++ (Issue #92)
|
|
|
|
++
|
|
|
|
++* Updated vendored ``six``, no longer overrides the global ``six`` module
|
|
|
|
++ namespace. (Issue #113)
|
|
|
|
++
|
|
|
|
++* ``urllib3.exceptions.MaxRetryError`` contains a ``reason`` property holding
|
|
|
|
++ the exception that prompted the final retry. If ``reason is None`` then it
|
|
|
|
++ was due to a redirect. (Issue #92, #114)
|
|
|
|
++
|
|
|
|
++* Fixed ``PoolManager.urlopen()`` from not redirecting more than once.
|
|
|
|
++ (Issue #149)
|
|
|
|
++
|
|
|
|
++* Don't assume ``Content-Type: text/plain`` for multi-part encoding parameters
|
|
|
|
++ that are not files. (Issue #111)
|
|
|
|
++
|
|
|
|
++* Pass `strict` param down to ``httplib.HTTPConnection``. (Issue #122)
|
|
|
|
++
|
|
|
|
++* Added mechanism to verify SSL certificates by fingerprint (md5, sha1) or
|
|
|
|
++ against an arbitrary hostname (when connecting by IP or for misconfigured
|
|
|
|
++ servers). (Issue #140)
|
|
|
|
++
|
|
|
|
++* Streaming decompression support. (Issue #159)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.5 (2012-08-02)
|
|
|
|
++----------------
|
|
|
|
++
|
|
|
|
++* Added ``urllib3.add_stderr_logger()`` for quickly enabling STDERR debug
|
|
|
|
++ logging in urllib3.
|
|
|
|
++
|
|
|
|
++* Native full URL parsing (including auth, path, query, fragment) available in
|
|
|
|
++ ``urllib3.util.parse_url(url)``.
|
|
|
|
++
|
|
|
|
++* Built-in redirect will switch method to 'GET' if status code is 303.
|
|
|
|
++ (Issue #11)
|
|
|
|
++
|
|
|
|
++* ``urllib3.PoolManager`` strips the scheme and host before sending the request
|
|
|
|
++ uri. (Issue #8)
|
|
|
|
++
|
|
|
|
++* New ``urllib3.exceptions.DecodeError`` exception for when automatic decoding,
|
|
|
|
++ based on the Content-Type header, fails.
|
|
|
|
++
|
|
|
|
++* Fixed bug with pool depletion and leaking connections (Issue #76). Added
|
|
|
|
++ explicit connection closing on pool eviction. Added
|
|
|
|
++ ``urllib3.PoolManager.clear()``.
|
|
|
|
++
|
|
|
|
++* 99% -> 100% unit test coverage.
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.4 (2012-06-16)
|
|
|
|
++----------------
|
|
|
|
++
|
|
|
|
++* Minor AppEngine-related fixes.
|
|
|
|
++
|
|
|
|
++* Switched from ``mimetools.choose_boundary`` to ``uuid.uuid4()``.
|
|
|
|
++
|
|
|
|
++* Improved url parsing. (Issue #73)
|
|
|
|
++
|
|
|
|
++* IPv6 url support. (Issue #72)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.3 (2012-03-25)
|
|
|
|
++----------------
|
|
|
|
++
|
|
|
|
++* Removed pre-1.0 deprecated API.
|
|
|
|
++
|
|
|
|
++* Refactored helpers into a ``urllib3.util`` submodule.
|
|
|
|
++
|
|
|
|
++* Fixed multipart encoding to support list-of-tuples for keys with multiple
|
|
|
|
++ values. (Issue #48)
|
|
|
|
++
|
|
|
|
++* Fixed multiple Set-Cookie headers in response not getting merged properly in
|
|
|
|
++ Python 3. (Issue #53)
|
|
|
|
++
|
|
|
|
++* AppEngine support with Py27. (Issue #61)
|
|
|
|
++
|
|
|
|
++* Minor ``encode_multipart_formdata`` fixes related to Python 3 strings vs
|
|
|
|
++ bytes.
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.2.2 (2012-02-06)
|
|
|
|
++------------------
|
|
|
|
++
|
|
|
|
++* Fixed packaging bug of not shipping ``test-requirements.txt``. (Issue #47)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.2.1 (2012-02-05)
|
|
|
|
++------------------
|
|
|
|
++
|
|
|
|
++* Fixed another bug related to when ``ssl`` module is not available. (Issue #41)
|
|
|
|
++
|
|
|
|
++* Location parsing errors now raise ``urllib3.exceptions.LocationParseError``
|
|
|
|
++ which inherits from ``ValueError``.
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.2 (2012-01-29)
|
|
|
|
++----------------
|
|
|
|
++
|
|
|
|
++* Added Python 3 support (tested on 3.2.2)
|
|
|
|
++
|
|
|
|
++* Dropped Python 2.5 support (tested on 2.6.7, 2.7.2)
|
|
|
|
++
|
|
|
|
++* Use ``select.poll`` instead of ``select.select`` for platforms that support
|
|
|
|
++ it.
|
|
|
|
++
|
|
|
|
++* Use ``Queue.LifoQueue`` instead of ``Queue.Queue`` for more aggressive
|
|
|
|
++ connection reusing. Configurable by overriding ``ConnectionPool.QueueCls``.
|
|
|
|
++
|
|
|
|
++* Fixed ``ImportError`` during install when ``ssl`` module is not available.
|
|
|
|
++ (Issue #41)
|
|
|
|
++
|
|
|
|
++* Fixed ``PoolManager`` redirects between schemes (such as HTTP -> HTTPS) not
|
|
|
|
++ completing properly. (Issue #28, uncovered by Issue #10 in v1.1)
|
|
|
|
++
|
|
|
|
++* Ported ``dummyserver`` to use ``tornado`` instead of ``webob`` +
|
|
|
|
++ ``eventlet``. Removed extraneous unsupported dummyserver testing backends.
|
|
|
|
++ Added socket-level tests.
|
|
|
|
++
|
|
|
|
++* More tests. Achievement Unlocked: 99% Coverage.
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.1 (2012-01-07)
|
|
|
|
++----------------
|
|
|
|
++
|
|
|
|
++* Refactored ``dummyserver`` to its own root namespace module (used for
|
|
|
|
++ testing).
|
|
|
|
++
|
|
|
|
++* Added hostname verification for ``VerifiedHTTPSConnection`` by vendoring in
|
|
|
|
++ Py32's ``ssl_match_hostname``. (Issue #25)
|
|
|
|
++
|
|
|
|
++* Fixed cross-host HTTP redirects when using ``PoolManager``. (Issue #10)
|
|
|
|
++
|
|
|
|
++* Fixed ``decode_content`` being ignored when set through ``urlopen``. (Issue
|
|
|
|
++ #27)
|
|
|
|
++
|
|
|
|
++* Fixed timeout-related bugs. (Issues #17, #23)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.0.2 (2011-11-04)
|
|
|
|
++------------------
|
|
|
|
++
|
|
|
|
++* Fixed typo in ``VerifiedHTTPSConnection`` which would only present as a bug if
|
|
|
|
++ you're using the object manually. (Thanks pyos)
|
|
|
|
++
|
|
|
|
++* Made RecentlyUsedContainer (and consequently PoolManager) more thread-safe by
|
|
|
|
++ wrapping the access log in a mutex. (Thanks @christer)
|
|
|
|
++
|
|
|
|
++* Made RecentlyUsedContainer more dict-like (corrected ``__delitem__`` and
|
|
|
|
++ ``__getitem__`` behaviour), with tests. Shouldn't affect core urllib3 code.
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.0.1 (2011-10-10)
|
|
|
|
++------------------
|
|
|
|
++
|
|
|
|
++* Fixed a bug where the same connection would get returned into the pool twice,
|
|
|
|
++ causing extraneous "HttpConnectionPool is full" log warnings.
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++1.0 (2011-10-08)
|
|
|
|
++----------------
|
|
|
|
++
|
|
|
|
++* Added ``PoolManager`` with LRU expiration of connections (tested and
|
|
|
|
++ documented).
|
|
|
|
++* Added ``ProxyManager`` (needs tests, docs, and confirmation that it works
|
|
|
|
++ with HTTPS proxies).
|
|
|
|
++* Added optional partial-read support for responses when
|
|
|
|
++ ``preload_content=False``. You can now make requests and just read the headers
|
|
|
|
++ without loading the content.
|
|
|
|
++* Made response decoding optional (default on, same as before).
|
|
|
|
++* Added optional explicit boundary string for ``encode_multipart_formdata``.
|
|
|
|
++* Convenience request methods are now inherited from ``RequestMethods``. Old
|
|
|
|
++ helpers like ``get_url`` and ``post_url`` should be abandoned in favour of
|
|
|
|
++ the new ``request(method, url, ...)``.
|
|
|
|
++* Refactored code to be even more decoupled, reusable, and extendable.
|
|
|
|
++* License header added to ``.py`` files.
|
|
|
|
++* Embiggened the documentation: Lots of Sphinx-friendly docstrings in the code
|
|
|
|
++ and docs in ``docs/`` and on https://urllib3.readthedocs.io/.
|
|
|
|
++* Embettered all the things!
|
|
|
|
++* Started writing this file.
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++0.4.1 (2011-07-17)
|
|
|
|
++------------------
|
|
|
|
++
|
|
|
|
++* Minor bug fixes, code cleanup.
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++0.4 (2011-03-01)
|
|
|
|
++----------------
|
|
|
|
++
|
|
|
|
++* Better unicode support.
|
|
|
|
++* Added ``VerifiedHTTPSConnection``.
|
|
|
|
++* Added ``NTLMConnectionPool`` in contrib.
|
|
|
|
++* Minor improvements.
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++0.3.1 (2010-07-13)
|
|
|
|
++------------------
|
|
|
|
++
|
|
|
|
++* Added ``assert_host_name`` optional parameter. Now compatible with proxies.
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++0.3 (2009-12-10)
|
|
|
|
++----------------
|
|
|
|
++
|
|
|
|
++* Added HTTPS support.
|
|
|
|
++* Minor bug fixes.
|
|
|
|
++* Refactored, broken backwards compatibility with 0.2.
|
|
|
|
++* API to be treated as stable from this version forward.
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++0.2 (2008-11-17)
|
|
|
|
++----------------
|
|
|
|
++
|
|
|
|
++* Added unit tests.
|
|
|
|
++* Bug fixes.
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++0.1 (2008-11-16)
|
|
|
|
++----------------
|
|
|
|
++
|
|
|
|
++* First release.
|
|
|
|
++
|
|
|
|
++
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3-1.26.0.dist-info/RECORD b/third_party/python/urllib3/urllib3-1.26.0.dist-info/RECORD
|
|
|
|
+new file mode 100644
|
|
|
|
+--- /dev/null
|
|
|
|
++++ b/third_party/python/urllib3/urllib3-1.26.0.dist-info/RECORD
|
|
|
|
+@@ -0,0 +1,44 @@
|
|
|
|
++urllib3/__init__.py,sha256=j3yzHIbmW7CS-IKQJ9-PPQf_YKO8EOAey_rMW0UR7us,2763
|
|
|
|
++urllib3/_collections.py,sha256=Rp1mVyBgc_UlAcp6M3at1skJBXR5J43NawRTvW2g_XY,10811
|
|
|
|
++urllib3/_version.py,sha256=H0vLQ8PY350EPZlZQa8ri0tEjVS-xhGdQOHcU360-0A,63
|
|
|
|
++urllib3/connection.py,sha256=BdaUSNpGzO0zq28i9MhOXb6QZspeVdVrYtjnkk2Eqg4,18396
|
|
|
|
++urllib3/connectionpool.py,sha256=IKoeuJZY9YAYm0GK4q-MXAhyXW0M_FnvabYaNsDIR-E,37133
|
|
|
|
++urllib3/exceptions.py,sha256=lNrKC5J8zeBXIu9SSKSNb7cLi8iXl9ARu9DHD2SflZM,7810
|
|
|
|
++urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579
|
|
|
|
++urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440
|
|
|
|
++urllib3/poolmanager.py,sha256=whzlX6UTEgODMOCy0ZDMUONRBCz5wyIM8Z9opXAY-Lk,19763
|
|
|
|
++urllib3/request.py,sha256=ZFSIqX0C6WizixecChZ3_okyu7BEv0lZu1VT0s6h4SM,5985
|
|
|
|
++urllib3/response.py,sha256=hGhGBh7TkEkh_IQg5C1W_xuPNrgIKv5BUXPyE-q0LuE,28203
|
|
|
|
++urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
|
|
++urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957
|
|
|
|
++urllib3/contrib/appengine.py,sha256=7Pxb0tKfDB_LTGPERiswH0qomhDoUUOo5kwybAKLQyE,11010
|
|
|
|
++urllib3/contrib/ntlmpool.py,sha256=6I95h1_71fzxmoMSNtY0gB8lnyCoVtP_DpqFGj14fdU,4160
|
|
|
|
++urllib3/contrib/pyopenssl.py,sha256=vgh6j52w9xgwq-3R2kfB5M2JblQATJfKAK3lIAc1kSg,16778
|
|
|
|
++urllib3/contrib/securetransport.py,sha256=KxGPZk8d4YepWm7Rc-SBt1XrzIfnLKc8JkUVV75XzgE,34286
|
|
|
|
++urllib3/contrib/socks.py,sha256=DcRjM2l0rQMIyhYrN6r-tnVkY6ZTDxHJlM8_usAkGCA,7097
|
|
|
|
++urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
|
|
++urllib3/contrib/_securetransport/bindings.py,sha256=E1_7ScsgOchfxneozbAueK7ziCwF35fna4DuDCYJ9_o,17637
|
|
|
|
++urllib3/contrib/_securetransport/low_level.py,sha256=lgIdsSycqfB0Xm5BiJzXGeIKT7ybCQMFPJAgkcwPa1s,13908
|
|
|
|
++urllib3/packages/__init__.py,sha256=h4BLhD4tLaBx1adaDtKXfupsgqY0wWLXb_f1_yVlV6A,108
|
|
|
|
++urllib3/packages/six.py,sha256=adx4z-eM_D0Vvu0IIqVzFACQ_ux9l64y7DkSEfbxCDs,32536
|
|
|
|
++urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
|
|
++urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417
|
|
|
|
++urllib3/packages/ssl_match_hostname/__init__.py,sha256=zppezdEQdpGsYerI6mV6MfUYy495JV4mcOWC_GgbljU,757
|
|
|
|
++urllib3/packages/ssl_match_hostname/_implementation.py,sha256=6dZ-q074g7XhsJ27MFCgkct8iVNZB3sMZvKhf-KUVy0,5679
|
|
|
|
++urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155
|
|
|
|
++urllib3/util/connection.py,sha256=21B-LX0c8fkxPDssyHCaK0pCnmrKmhltg5EoouHiAPU,4910
|
|
|
|
++urllib3/util/proxy.py,sha256=FGipAEnvZteyldXNjce4DEB7YzwU-a5lep8y5S0qHQg,1604
|
|
|
|
++urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498
|
|
|
|
++urllib3/util/request.py,sha256=NnzaEKQ1Pauw5MFMV6HmgEMHITf0Aua9fQuzi2uZzGc,4123
|
|
|
|
++urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510
|
|
|
|
++urllib3/util/retry.py,sha256=tn168HDMUynFmXRP-uVaLRUOlbTEJikoB1RuZdwfCes,21366
|
|
|
|
++urllib3/util/ssl_.py,sha256=cUsmU604z2zAOZcaXDpINXOokQ1RtlJMe96TBDkaJp0,16199
|
|
|
|
++urllib3/util/ssltransport.py,sha256=IvGQvs9YWkf4jzfqVjTu_UWjwAUgPn5ActajW8VLz6A,6908
|
|
|
|
++urllib3/util/timeout.py,sha256=QSbBUNOB9yh6AnDn61SrLQ0hg5oz0I9-uXEG91AJuIg,10003
|
|
|
|
++urllib3/util/url.py,sha256=LWfLSlI4l2FmUMKfCkElCaW10-0N-sJDT9bxaDZJkjs,13964
|
|
|
|
++urllib3/util/wait.py,sha256=3MUKRSAUJDB2tgco7qRUskW0zXGAWYvRRE4Q1_6xlLs,5404
|
|
|
|
++urllib3-1.26.0.dist-info/LICENSE.txt,sha256=w3vxhuJ8-dvpYZ5V7f486nswCRzrPaY8fay-Dm13kHs,1115
|
|
|
|
++urllib3-1.26.0.dist-info/METADATA,sha256=Wghdt6nLf9HfZHhWj8Dpgz4n9vGRqXYhdIwJRPgki6M,42629
|
|
|
|
++urllib3-1.26.0.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
|
|
|
|
++urllib3-1.26.0.dist-info/top_level.txt,sha256=EMiXL2sKrTcmrMxIHTqdc3ET54pQI2Y072LexFEemvo,8
|
|
|
|
++urllib3-1.26.0.dist-info/RECORD,,
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3-1.26.0.dist-info/WHEEL b/third_party/python/urllib3/urllib3-1.26.0.dist-info/WHEEL
|
|
|
|
+new file mode 100644
|
|
|
|
+--- /dev/null
|
|
|
|
++++ b/third_party/python/urllib3/urllib3-1.26.0.dist-info/WHEEL
|
|
|
|
+@@ -0,0 +1,6 @@
|
|
|
|
++Wheel-Version: 1.0
|
|
|
|
++Generator: bdist_wheel (0.35.1)
|
|
|
|
++Root-Is-Purelib: true
|
|
|
|
++Tag: py2-none-any
|
|
|
|
++Tag: py3-none-any
|
|
|
|
++
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3-1.25.9.dist-info/top_level.txt b/third_party/python/urllib3/urllib3-1.26.0.dist-info/top_level.txt
|
|
|
|
+rename from third_party/python/urllib3/urllib3-1.25.9.dist-info/top_level.txt
|
|
|
|
+rename to third_party/python/urllib3/urllib3-1.26.0.dist-info/top_level.txt
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/__init__.py b/third_party/python/urllib3/urllib3/__init__.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/__init__.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/__init__.py
|
|
|
|
+@@ -1,33 +1,32 @@
|
|
|
|
+ """
|
|
|
|
+-urllib3 - Thread-safe connection pooling and re-using.
|
|
|
|
++Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more
|
|
|
|
+ """
|
|
|
|
+ from __future__ import absolute_import
|
|
|
|
++
|
|
|
|
++# Set default logging handler to avoid "No handler found" warnings.
|
|
|
|
++import logging
|
|
|
|
+ import warnings
|
|
|
|
+-
|
|
|
|
+-from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
|
|
|
|
++from logging import NullHandler
|
|
|
|
+
|
|
|
|
+ from . import exceptions
|
|
|
|
++from ._version import __version__
|
|
|
|
++from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
|
|
|
|
+ from .filepost import encode_multipart_formdata
|
|
|
|
+ from .poolmanager import PoolManager, ProxyManager, proxy_from_url
|
|
|
|
+ from .response import HTTPResponse
|
|
|
|
+ from .util.request import make_headers
|
|
|
|
+-from .util.url import get_host
|
|
|
|
++from .util.retry import Retry
|
|
|
|
+ from .util.timeout import Timeout
|
|
|
|
+-from .util.retry import Retry
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-# Set default logging handler to avoid "No handler found" warnings.
|
|
|
|
+-import logging
|
|
|
|
+-from logging import NullHandler
|
|
|
|
++from .util.url import get_host
|
|
|
|
+
|
|
|
|
+ __author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
|
|
|
|
+ __license__ = "MIT"
|
|
|
|
+-__version__ = "1.25.9"
|
|
|
|
++__version__ = __version__
|
|
|
|
+
|
|
|
|
+ __all__ = (
|
|
|
|
+ "HTTPConnectionPool",
|
|
|
|
+ "HTTPSConnectionPool",
|
|
|
|
+ "PoolManager",
|
|
|
|
+ "ProxyManager",
|
|
|
|
+ "HTTPResponse",
|
|
|
|
+ "Retry",
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/_collections.py b/third_party/python/urllib3/urllib3/_collections.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/_collections.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/_collections.py
|
|
|
|
+@@ -12,19 +12,20 @@ except ImportError: # Platform-specific
|
|
|
|
+ def __enter__(self):
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+ def __exit__(self, exc_type, exc_value, traceback):
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ from collections import OrderedDict
|
|
|
|
++
|
|
|
|
+ from .exceptions import InvalidHeader
|
|
|
|
+-from .packages.six import iterkeys, itervalues, PY3
|
|
|
|
+-
|
|
|
|
++from .packages import six
|
|
|
|
++from .packages.six import iterkeys, itervalues
|
|
|
|
+
|
|
|
|
+ __all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"]
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ _Null = object()
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class RecentlyUsedContainer(MutableMapping):
|
|
|
|
+@@ -169,33 +170,33 @@ class HTTPHeaderDict(MutableMapping):
|
|
|
|
+ other = type(self)(other)
|
|
|
|
+ return dict((k.lower(), v) for k, v in self.itermerged()) == dict(
|
|
|
|
+ (k.lower(), v) for k, v in other.itermerged()
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
+ def __ne__(self, other):
|
|
|
|
+ return not self.__eq__(other)
|
|
|
|
+
|
|
|
|
+- if not PY3: # Python 2
|
|
|
|
++ if six.PY2: # Python 2
|
|
|
|
+ iterkeys = MutableMapping.iterkeys
|
|
|
|
+ itervalues = MutableMapping.itervalues
|
|
|
|
+
|
|
|
|
+ __marker = object()
|
|
|
|
+
|
|
|
|
+ def __len__(self):
|
|
|
|
+ return len(self._container)
|
|
|
|
+
|
|
|
|
+ def __iter__(self):
|
|
|
|
+ # Only provide the originally cased names
|
|
|
|
+ for vals in self._container.values():
|
|
|
|
+ yield vals[0]
|
|
|
|
+
|
|
|
|
+ def pop(self, key, default=__marker):
|
|
|
|
+ """D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
|
|
|
|
+- If key is not found, d is returned if given, otherwise KeyError is raised.
|
|
|
|
++ If key is not found, d is returned if given, otherwise KeyError is raised.
|
|
|
|
+ """
|
|
|
|
+ # Using the MutableMapping function directly fails due to the private marker.
|
|
|
|
+ # Using ordinary dict.pop would expose the internal structures.
|
|
|
|
+ # So let's reinvent the wheel.
|
|
|
|
+ try:
|
|
|
|
+ value = self[key]
|
|
|
|
+ except KeyError:
|
|
|
|
+ if default is self.__marker:
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/_version.py b/third_party/python/urllib3/urllib3/_version.py
|
|
|
|
+new file mode 100644
|
|
|
|
+--- /dev/null
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/_version.py
|
|
|
|
+@@ -0,0 +1,2 @@
|
|
|
|
++# This file is protected via CODEOWNERS
|
|
|
|
++__version__ = "1.26.0"
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/connection.py b/third_party/python/urllib3/urllib3/connection.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/connection.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/connection.py
|
|
|
|
+@@ -1,19 +1,23 @@
|
|
|
|
+ from __future__ import absolute_import
|
|
|
|
+-import re
|
|
|
|
++
|
|
|
|
+ import datetime
|
|
|
|
+ import logging
|
|
|
|
+ import os
|
|
|
|
++import re
|
|
|
|
+ import socket
|
|
|
|
+-from socket import error as SocketError, timeout as SocketTimeout
|
|
|
|
+ import warnings
|
|
|
|
++from socket import error as SocketError
|
|
|
|
++from socket import timeout as SocketTimeout
|
|
|
|
++
|
|
|
|
+ from .packages import six
|
|
|
|
+ from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection
|
|
|
|
+ from .packages.six.moves.http_client import HTTPException # noqa: F401
|
|
|
|
++from .util.proxy import create_proxy_ssl_context
|
|
|
|
+
|
|
|
|
+ try: # Compiled with SSL?
|
|
|
|
+ import ssl
|
|
|
|
+
|
|
|
|
+ BaseSSLError = ssl.SSLError
|
|
|
|
+ except (ImportError, AttributeError): # Platform-specific: No SSL.
|
|
|
|
+ ssl = None
|
|
|
|
+
|
|
|
|
+@@ -25,76 +29,77 @@ try:
|
|
|
|
+ # Python 3: not a no-op, we're adding this to the namespace so it can be imported.
|
|
|
|
+ ConnectionError = ConnectionError
|
|
|
|
+ except NameError:
|
|
|
|
+ # Python 2
|
|
|
|
+ class ConnectionError(Exception):
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
++try: # Python 3:
|
|
|
|
++ # Not a no-op, we're adding this to the namespace so it can be imported.
|
|
|
|
++ BrokenPipeError = BrokenPipeError
|
|
|
|
++except NameError: # Python 2:
|
|
|
|
++
|
|
|
|
++ class BrokenPipeError(Exception):
|
|
|
|
++ pass
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++from ._version import __version__
|
|
|
|
+ from .exceptions import (
|
|
|
|
++ ConnectTimeoutError,
|
|
|
|
+ NewConnectionError,
|
|
|
|
+- ConnectTimeoutError,
|
|
|
|
+ SubjectAltNameWarning,
|
|
|
|
+ SystemTimeWarning,
|
|
|
|
+ )
|
|
|
|
+-from .packages.ssl_match_hostname import match_hostname, CertificateError
|
|
|
|
+-
|
|
|
|
++from .packages.ssl_match_hostname import CertificateError, match_hostname
|
|
|
|
++from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection
|
|
|
|
+ from .util.ssl_ import (
|
|
|
|
+- resolve_cert_reqs,
|
|
|
|
+- resolve_ssl_version,
|
|
|
|
+ assert_fingerprint,
|
|
|
|
+ create_urllib3_context,
|
|
|
|
++ resolve_cert_reqs,
|
|
|
|
++ resolve_ssl_version,
|
|
|
|
+ ssl_wrap_socket,
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
+-
|
|
|
|
+-from .util import connection
|
|
|
|
+-
|
|
|
|
+-from ._collections import HTTPHeaderDict
|
|
|
|
+-
|
|
|
|
+ log = logging.getLogger(__name__)
|
|
|
|
+
|
|
|
|
+ port_by_scheme = {"http": 80, "https": 443}
|
|
|
|
+
|
|
|
|
+ # When it comes time to update this value as a part of regular maintenance
|
|
|
|
+ # (ie test_recent_date is failing) update it to ~6 months before the current date.
|
|
|
|
+ RECENT_DATE = datetime.date(2019, 1, 1)
|
|
|
|
+
|
|
|
|
+ _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+-class DummyConnection(object):
|
|
|
|
+- """Used to detect a failed ConnectionCls import."""
|
|
|
|
+-
|
|
|
|
+- pass
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+ class HTTPConnection(_HTTPConnection, object):
|
|
|
|
+ """
|
|
|
|
+- Based on httplib.HTTPConnection but provides an extra constructor
|
|
|
|
++ Based on :class:`http.client.HTTPConnection` but provides an extra constructor
|
|
|
|
+ backwards-compatibility layer between older and newer Pythons.
|
|
|
|
+
|
|
|
|
+ Additional keyword parameters are used to configure attributes of the connection.
|
|
|
|
+ Accepted parameters include:
|
|
|
|
+
|
|
|
|
+- - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
|
|
|
|
+- - ``source_address``: Set the source address for the current connection.
|
|
|
|
+- - ``socket_options``: Set specific options on the underlying socket. If not specified, then
|
|
|
|
+- defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
|
|
|
|
+- Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
|
|
|
|
++ - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
|
|
|
|
++ - ``source_address``: Set the source address for the current connection.
|
|
|
|
++ - ``socket_options``: Set specific options on the underlying socket. If not specified, then
|
|
|
|
++ defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
|
|
|
|
++ Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
|
|
|
|
+
|
|
|
|
+- For example, if you wish to enable TCP Keep Alive in addition to the defaults,
|
|
|
|
+- you might pass::
|
|
|
|
++ For example, if you wish to enable TCP Keep Alive in addition to the defaults,
|
|
|
|
++ you might pass:
|
|
|
|
++
|
|
|
|
++ .. code-block:: python
|
|
|
|
+
|
|
|
|
+- HTTPConnection.default_socket_options + [
|
|
|
|
+- (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
|
|
|
|
+- ]
|
|
|
|
++ HTTPConnection.default_socket_options + [
|
|
|
|
++ (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
|
|
|
|
++ ]
|
|
|
|
+
|
|
|
|
+- Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
|
|
|
|
++ Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
|
|
|
|
+ """
|
|
|
|
+
|
|
|
|
+ default_port = port_by_scheme["http"]
|
|
|
|
+
|
|
|
|
+ #: Disable Nagle's algorithm by default.
|
|
|
|
+ #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
|
|
|
|
+ default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
|
|
|
|
+
|
|
|
|
+@@ -107,16 +112,20 @@ class HTTPConnection(_HTTPConnection, ob
|
|
|
|
+
|
|
|
|
+ # Pre-set source_address.
|
|
|
|
+ self.source_address = kw.get("source_address")
|
|
|
|
+
|
|
|
|
+ #: The socket options provided by the user. If no options are
|
|
|
|
+ #: provided, we use the default options.
|
|
|
|
+ self.socket_options = kw.pop("socket_options", self.default_socket_options)
|
|
|
|
+
|
|
|
|
++ # Proxy options provided by the user.
|
|
|
|
++ self.proxy = kw.pop("proxy", None)
|
|
|
|
++ self.proxy_config = kw.pop("proxy_config", None)
|
|
|
|
++
|
|
|
|
+ _HTTPConnection.__init__(self, *args, **kw)
|
|
|
|
+
|
|
|
|
+ @property
|
|
|
|
+ def host(self):
|
|
|
|
+ """
|
|
|
|
+ Getter method to remove any trailing dots that indicate the hostname is an FQDN.
|
|
|
|
+
|
|
|
|
+ In general, SSL certificates don't include the trailing dot indicating a
|
|
|
|
+@@ -139,17 +148,17 @@ class HTTPConnection(_HTTPConnection, ob
|
|
|
|
+ Setter for the `host` property.
|
|
|
|
+
|
|
|
|
+ We assume that only urllib3 uses the _dns_host attribute; httplib itself
|
|
|
|
+ only uses `host`, and it seems reasonable that other libraries follow suit.
|
|
|
|
+ """
|
|
|
|
+ self._dns_host = value
|
|
|
|
+
|
|
|
|
+ def _new_conn(self):
|
|
|
|
+- """ Establish a socket connection and set nodelay settings on it.
|
|
|
|
++ """Establish a socket connection and set nodelay settings on it.
|
|
|
|
+
|
|
|
|
+ :return: New socket connection.
|
|
|
|
+ """
|
|
|
|
+ extra_kw = {}
|
|
|
|
+ if self.source_address:
|
|
|
|
+ extra_kw["source_address"] = self.source_address
|
|
|
|
+
|
|
|
|
+ if self.socket_options:
|
|
|
|
+@@ -169,85 +178,120 @@ class HTTPConnection(_HTTPConnection, ob
|
|
|
|
+
|
|
|
|
+ except SocketError as e:
|
|
|
|
+ raise NewConnectionError(
|
|
|
|
+ self, "Failed to establish a new connection: %s" % e
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
+ return conn
|
|
|
|
+
|
|
|
|
++ def _is_using_tunnel(self):
|
|
|
|
++ # Google App Engine's httplib does not define _tunnel_host
|
|
|
|
++ return getattr(self, "_tunnel_host", None)
|
|
|
|
++
|
|
|
|
+ def _prepare_conn(self, conn):
|
|
|
|
+ self.sock = conn
|
|
|
|
+- # Google App Engine's httplib does not define _tunnel_host
|
|
|
|
+- if getattr(self, "_tunnel_host", None):
|
|
|
|
++ if self._is_using_tunnel():
|
|
|
|
+ # TODO: Fix tunnel so it doesn't depend on self.sock state.
|
|
|
|
+ self._tunnel()
|
|
|
|
+ # Mark this connection as not reusable
|
|
|
|
+ self.auto_open = 0
|
|
|
|
+
|
|
|
|
+ def connect(self):
|
|
|
|
+ conn = self._new_conn()
|
|
|
|
+ self._prepare_conn(conn)
|
|
|
|
+
|
|
|
|
+ def putrequest(self, method, url, *args, **kwargs):
|
|
|
|
+- """Send a request to the server"""
|
|
|
|
++ """"""
|
|
|
|
++ # Empty docstring because the indentation of CPython's implementation
|
|
|
|
++ # is broken but we don't want this method in our documentation.
|
|
|
|
+ match = _CONTAINS_CONTROL_CHAR_RE.search(method)
|
|
|
|
+ if match:
|
|
|
|
+ raise ValueError(
|
|
|
|
+ "Method cannot contain non-token characters %r (found at least %r)"
|
|
|
|
+ % (method, match.group())
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
+ return _HTTPConnection.putrequest(self, method, url, *args, **kwargs)
|
|
|
|
+
|
|
|
|
++ def putheader(self, header, *values):
|
|
|
|
++ """"""
|
|
|
|
++ if SKIP_HEADER not in values:
|
|
|
|
++ _HTTPConnection.putheader(self, header, *values)
|
|
|
|
++ elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS:
|
|
|
|
++ raise ValueError(
|
|
|
|
++ "urllib3.util.SKIP_HEADER only supports '%s'"
|
|
|
|
++ % ("', '".join(map(str.title, sorted(SKIPPABLE_HEADERS))),)
|
|
|
|
++ )
|
|
|
|
++
|
|
|
|
++ def request(self, method, url, body=None, headers=None):
|
|
|
|
++ if headers is None:
|
|
|
|
++ headers = {}
|
|
|
|
++ else:
|
|
|
|
++ # Avoid modifying the headers passed into .request()
|
|
|
|
++ headers = headers.copy()
|
|
|
|
++ if "user-agent" not in (k.lower() for k in headers):
|
|
|
|
++ headers["User-Agent"] = _get_default_user_agent()
|
|
|
|
++ super(HTTPConnection, self).request(method, url, body=body, headers=headers)
|
|
|
|
++
|
|
|
|
+ def request_chunked(self, method, url, body=None, headers=None):
|
|
|
|
+ """
|
|
|
|
+ Alternative to the common request method, which sends the
|
|
|
|
+ body with chunked encoding and not as one block
|
|
|
|
+ """
|
|
|
|
+- headers = HTTPHeaderDict(headers if headers is not None else {})
|
|
|
|
+- skip_accept_encoding = "accept-encoding" in headers
|
|
|
|
+- skip_host = "host" in headers
|
|
|
|
++ headers = headers or {}
|
|
|
|
++ header_keys = set([six.ensure_str(k.lower()) for k in headers])
|
|
|
|
++ skip_accept_encoding = "accept-encoding" in header_keys
|
|
|
|
++ skip_host = "host" in header_keys
|
|
|
|
+ self.putrequest(
|
|
|
|
+ method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
|
|
|
|
+ )
|
|
|
|
++ if "user-agent" not in header_keys:
|
|
|
|
++ self.putheader("User-Agent", _get_default_user_agent())
|
|
|
|
+ for header, value in headers.items():
|
|
|
|
+ self.putheader(header, value)
|
|
|
|
+ if "transfer-encoding" not in headers:
|
|
|
|
+ self.putheader("Transfer-Encoding", "chunked")
|
|
|
|
+ self.endheaders()
|
|
|
|
+
|
|
|
|
+ if body is not None:
|
|
|
|
+ stringish_types = six.string_types + (bytes,)
|
|
|
|
+ if isinstance(body, stringish_types):
|
|
|
|
+ body = (body,)
|
|
|
|
+ for chunk in body:
|
|
|
|
+ if not chunk:
|
|
|
|
+ continue
|
|
|
|
+ if not isinstance(chunk, bytes):
|
|
|
|
+ chunk = chunk.encode("utf8")
|
|
|
|
+ len_str = hex(len(chunk))[2:]
|
|
|
|
+- self.send(len_str.encode("utf-8"))
|
|
|
|
+- self.send(b"\r\n")
|
|
|
|
+- self.send(chunk)
|
|
|
|
+- self.send(b"\r\n")
|
|
|
|
++ to_send = bytearray(len_str.encode())
|
|
|
|
++ to_send += b"\r\n"
|
|
|
|
++ to_send += chunk
|
|
|
|
++ to_send += b"\r\n"
|
|
|
|
++ self.send(to_send)
|
|
|
|
+
|
|
|
|
+ # After the if clause, to always have a closed body
|
|
|
|
+ self.send(b"0\r\n\r\n")
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class HTTPSConnection(HTTPConnection):
|
|
|
|
++ """
|
|
|
|
++ Many of the parameters to this constructor are passed to the underlying SSL
|
|
|
|
++ socket by means of :py:func:`urllib3.util.ssl_wrap_socket`.
|
|
|
|
++ """
|
|
|
|
++
|
|
|
|
+ default_port = port_by_scheme["https"]
|
|
|
|
+
|
|
|
|
+ cert_reqs = None
|
|
|
|
+ ca_certs = None
|
|
|
|
+ ca_cert_dir = None
|
|
|
|
+ ca_cert_data = None
|
|
|
|
+ ssl_version = None
|
|
|
|
+ assert_fingerprint = None
|
|
|
|
++ tls_in_tls_required = False
|
|
|
|
+
|
|
|
|
+ def __init__(
|
|
|
|
+ self,
|
|
|
|
+ host,
|
|
|
|
+ port=None,
|
|
|
|
+ key_file=None,
|
|
|
|
+ cert_file=None,
|
|
|
|
+ key_password=None,
|
|
|
|
+@@ -302,20 +346,25 @@ class HTTPSConnection(HTTPConnection):
|
|
|
|
+ self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
|
|
|
|
+ self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
|
|
|
|
+ self.ca_cert_data = ca_cert_data
|
|
|
|
+
|
|
|
|
+ def connect(self):
|
|
|
|
+ # Add certificate verification
|
|
|
|
+ conn = self._new_conn()
|
|
|
|
+ hostname = self.host
|
|
|
|
++ tls_in_tls = False
|
|
|
|
+
|
|
|
|
+- # Google App Engine's httplib does not define _tunnel_host
|
|
|
|
+- if getattr(self, "_tunnel_host", None):
|
|
|
|
++ if self._is_using_tunnel():
|
|
|
|
++ if self.tls_in_tls_required:
|
|
|
|
++ conn = self._connect_tls_proxy(hostname, conn)
|
|
|
|
++ tls_in_tls = True
|
|
|
|
++
|
|
|
|
+ self.sock = conn
|
|
|
|
++
|
|
|
|
+ # Calls self._set_hostport(), so self.host is
|
|
|
|
+ # self._tunnel_host below.
|
|
|
|
+ self._tunnel()
|
|
|
|
+ # Mark this connection as not reusable
|
|
|
|
+ self.auto_open = 0
|
|
|
|
+
|
|
|
|
+ # Override the host with the one we're requesting data from.
|
|
|
|
+ hostname = self._tunnel_host
|
|
|
|
+@@ -363,18 +412,36 @@ class HTTPSConnection(HTTPConnection):
|
|
|
|
+ keyfile=self.key_file,
|
|
|
|
+ certfile=self.cert_file,
|
|
|
|
+ key_password=self.key_password,
|
|
|
|
+ ca_certs=self.ca_certs,
|
|
|
|
+ ca_cert_dir=self.ca_cert_dir,
|
|
|
|
+ ca_cert_data=self.ca_cert_data,
|
|
|
|
+ server_hostname=server_hostname,
|
|
|
|
+ ssl_context=context,
|
|
|
|
++ tls_in_tls=tls_in_tls,
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
++ # If we're using all defaults and the connection
|
|
|
|
++ # is TLSv1 or TLSv1.1 we throw a DeprecationWarning
|
|
|
|
++ # for the host.
|
|
|
|
++ if (
|
|
|
|
++ default_ssl_context
|
|
|
|
++ and self.ssl_version is None
|
|
|
|
++ and hasattr(self.sock, "version")
|
|
|
|
++ and self.sock.version() in {"TLSv1", "TLSv1.1"}
|
|
|
|
++ ):
|
|
|
|
++ warnings.warn(
|
|
|
|
++ "Negotiating TLSv1/TLSv1.1 by default is deprecated "
|
|
|
|
++ "and will be disabled in urllib3 v2.0.0. Connecting to "
|
|
|
|
++ "'%s' with '%s' can be enabled by explicitly opting-in "
|
|
|
|
++ "with 'ssl_version'" % (self.host, self.sock.version()),
|
|
|
|
++ DeprecationWarning,
|
|
|
|
++ )
|
|
|
|
++
|
|
|
|
+ if self.assert_fingerprint:
|
|
|
|
+ assert_fingerprint(
|
|
|
|
+ self.sock.getpeercert(binary_form=True), self.assert_fingerprint
|
|
|
|
+ )
|
|
|
|
+ elif (
|
|
|
|
+ context.verify_mode != ssl.CERT_NONE
|
|
|
|
+ and not getattr(context, "check_hostname", False)
|
|
|
|
+ and self.assert_hostname is not False
|
|
|
|
+@@ -395,29 +462,73 @@ class HTTPSConnection(HTTPConnection):
|
|
|
|
+ )
|
|
|
|
+ _match_hostname(cert, self.assert_hostname or server_hostname)
|
|
|
|
+
|
|
|
|
+ self.is_verified = (
|
|
|
|
+ context.verify_mode == ssl.CERT_REQUIRED
|
|
|
|
+ or self.assert_fingerprint is not None
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
++ def _connect_tls_proxy(self, hostname, conn):
|
|
|
|
++ """
|
|
|
|
++ Establish a TLS connection to the proxy using the provided SSL context.
|
|
|
|
++ """
|
|
|
|
++ proxy_config = self.proxy_config
|
|
|
|
++ ssl_context = proxy_config.ssl_context
|
|
|
|
++ if ssl_context:
|
|
|
|
++ # If the user provided a proxy context, we assume CA and client
|
|
|
|
++ # certificates have already been set
|
|
|
|
++ return ssl_wrap_socket(
|
|
|
|
++ sock=conn,
|
|
|
|
++ server_hostname=hostname,
|
|
|
|
++ ssl_context=ssl_context,
|
|
|
|
++ )
|
|
|
|
++
|
|
|
|
++ ssl_context = create_proxy_ssl_context(
|
|
|
|
++ self.ssl_version,
|
|
|
|
++ self.cert_reqs,
|
|
|
|
++ self.ca_certs,
|
|
|
|
++ self.ca_cert_dir,
|
|
|
|
++ self.ca_cert_data,
|
|
|
|
++ )
|
|
|
|
++
|
|
|
|
++ # If no cert was provided, use only the default options for server
|
|
|
|
++ # certificate validation
|
|
|
|
++ return ssl_wrap_socket(
|
|
|
|
++ sock=conn,
|
|
|
|
++ ca_certs=self.ca_certs,
|
|
|
|
++ ca_cert_dir=self.ca_cert_dir,
|
|
|
|
++ ca_cert_data=self.ca_cert_data,
|
|
|
|
++ server_hostname=hostname,
|
|
|
|
++ ssl_context=ssl_context,
|
|
|
|
++ )
|
|
|
|
++
|
|
|
|
+
|
|
|
|
+ def _match_hostname(cert, asserted_hostname):
|
|
|
|
+ try:
|
|
|
|
+ match_hostname(cert, asserted_hostname)
|
|
|
|
+ except CertificateError as e:
|
|
|
|
+ log.warning(
|
|
|
|
+ "Certificate did not match expected hostname: %s. Certificate: %s",
|
|
|
|
+ asserted_hostname,
|
|
|
|
+ cert,
|
|
|
|
+ )
|
|
|
|
+ # Add cert to exception and reraise so client code can inspect
|
|
|
|
+ # the cert when catching the exception, if they want to
|
|
|
|
+ e._peer_cert = cert
|
|
|
|
+ raise
|
|
|
|
+
|
|
|
|
+
|
|
|
|
++def _get_default_user_agent():
|
|
|
|
++ return "python-urllib3/%s" % __version__
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++class DummyConnection(object):
|
|
|
|
++ """Used to detect a failed ConnectionCls import."""
|
|
|
|
++
|
|
|
|
++ pass
|
|
|
|
++
|
|
|
|
++
|
|
|
|
+ if not ssl:
|
|
|
|
+ HTTPSConnection = DummyConnection # noqa: F811
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ VerifiedHTTPSConnection = HTTPSConnection
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/connectionpool.py b/third_party/python/urllib3/urllib3/connectionpool.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/connectionpool.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/connectionpool.py
|
|
|
|
+@@ -1,62 +1,58 @@
|
|
|
|
+ from __future__ import absolute_import
|
|
|
|
++
|
|
|
|
+ import errno
|
|
|
|
+ import logging
|
|
|
|
++import socket
|
|
|
|
+ import sys
|
|
|
|
+ import warnings
|
|
|
|
++from socket import error as SocketError
|
|
|
|
++from socket import timeout as SocketTimeout
|
|
|
|
+
|
|
|
|
+-from socket import error as SocketError, timeout as SocketTimeout
|
|
|
|
+-import socket
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
++from .connection import (
|
|
|
|
++ BaseSSLError,
|
|
|
|
++ BrokenPipeError,
|
|
|
|
++ DummyConnection,
|
|
|
|
++ HTTPConnection,
|
|
|
|
++ HTTPException,
|
|
|
|
++ HTTPSConnection,
|
|
|
|
++ VerifiedHTTPSConnection,
|
|
|
|
++ port_by_scheme,
|
|
|
|
++)
|
|
|
|
+ from .exceptions import (
|
|
|
|
+ ClosedPoolError,
|
|
|
|
+- ProtocolError,
|
|
|
|
+ EmptyPoolError,
|
|
|
|
+ HeaderParsingError,
|
|
|
|
+ HostChangedError,
|
|
|
|
++ InsecureRequestWarning,
|
|
|
|
+ LocationValueError,
|
|
|
|
+ MaxRetryError,
|
|
|
|
++ NewConnectionError,
|
|
|
|
++ ProtocolError,
|
|
|
|
+ ProxyError,
|
|
|
|
+ ReadTimeoutError,
|
|
|
|
+ SSLError,
|
|
|
|
+ TimeoutError,
|
|
|
|
+- InsecureRequestWarning,
|
|
|
|
+- NewConnectionError,
|
|
|
|
+ )
|
|
|
|
+-from .packages.ssl_match_hostname import CertificateError
|
|
|
|
+ from .packages import six
|
|
|
|
+ from .packages.six.moves import queue
|
|
|
|
+-from .connection import (
|
|
|
|
+- port_by_scheme,
|
|
|
|
+- DummyConnection,
|
|
|
|
+- HTTPConnection,
|
|
|
|
+- HTTPSConnection,
|
|
|
|
+- VerifiedHTTPSConnection,
|
|
|
|
+- HTTPException,
|
|
|
|
+- BaseSSLError,
|
|
|
|
+-)
|
|
|
|
++from .packages.ssl_match_hostname import CertificateError
|
|
|
|
+ from .request import RequestMethods
|
|
|
|
+ from .response import HTTPResponse
|
|
|
|
+-
|
|
|
|
+ from .util.connection import is_connection_dropped
|
|
|
|
++from .util.proxy import connection_requires_http_tunnel
|
|
|
|
++from .util.queue import LifoQueue
|
|
|
|
+ from .util.request import set_file_position
|
|
|
|
+ from .util.response import assert_header_parsing
|
|
|
|
+ from .util.retry import Retry
|
|
|
|
+ from .util.timeout import Timeout
|
|
|
|
+-from .util.url import (
|
|
|
|
+- get_host,
|
|
|
|
+- parse_url,
|
|
|
|
+- Url,
|
|
|
|
+- _normalize_host as normalize_host,
|
|
|
|
+- _encode_target,
|
|
|
|
+-)
|
|
|
|
+-from .util.queue import LifoQueue
|
|
|
|
+-
|
|
|
|
++from .util.url import Url, _encode_target
|
|
|
|
++from .util.url import _normalize_host as normalize_host
|
|
|
|
++from .util.url import get_host, parse_url
|
|
|
|
+
|
|
|
|
+ xrange = six.moves.xrange
|
|
|
|
+
|
|
|
|
+ log = logging.getLogger(__name__)
|
|
|
|
+
|
|
|
|
+ _Default = object()
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+@@ -106,26 +102,26 @@ class ConnectionPool(object):
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class HTTPConnectionPool(ConnectionPool, RequestMethods):
|
|
|
|
+ """
|
|
|
|
+ Thread-safe connection pool for one host.
|
|
|
|
+
|
|
|
|
+ :param host:
|
|
|
|
+ Host used for this HTTP Connection (e.g. "localhost"), passed into
|
|
|
|
+- :class:`httplib.HTTPConnection`.
|
|
|
|
++ :class:`http.client.HTTPConnection`.
|
|
|
|
+
|
|
|
|
+ :param port:
|
|
|
|
+ Port used for this HTTP Connection (None is equivalent to 80), passed
|
|
|
|
+- into :class:`httplib.HTTPConnection`.
|
|
|
|
++ into :class:`http.client.HTTPConnection`.
|
|
|
|
+
|
|
|
|
+ :param strict:
|
|
|
|
+ Causes BadStatusLine to be raised if the status line can't be parsed
|
|
|
|
+ as a valid HTTP/1.0 or 1.1 status line, passed into
|
|
|
|
+- :class:`httplib.HTTPConnection`.
|
|
|
|
++ :class:`http.client.HTTPConnection`.
|
|
|
|
+
|
|
|
|
+ .. note::
|
|
|
|
+ Only works in Python 2. This parameter is ignored in Python 3.
|
|
|
|
+
|
|
|
|
+ :param timeout:
|
|
|
|
+ Socket timeout in seconds for each individual connection. This can
|
|
|
|
+ be a float or integer, which sets the timeout for the HTTP request,
|
|
|
|
+ or an instance of :class:`urllib3.util.Timeout` which gives you more
|
|
|
|
+@@ -149,21 +145,21 @@ class HTTPConnectionPool(ConnectionPool,
|
|
|
|
+ Headers to include with all requests, unless other headers are given
|
|
|
|
+ explicitly.
|
|
|
|
+
|
|
|
|
+ :param retries:
|
|
|
|
+ Retry configuration to use by default with requests in this pool.
|
|
|
|
+
|
|
|
|
+ :param _proxy:
|
|
|
|
+ Parsed proxy URL, should not be used directly, instead, see
|
|
|
|
+- :class:`urllib3.connectionpool.ProxyManager`"
|
|
|
|
++ :class:`urllib3.ProxyManager`
|
|
|
|
+
|
|
|
|
+ :param _proxy_headers:
|
|
|
|
+ A dictionary with proxy headers, should not be used directly,
|
|
|
|
+- instead, see :class:`urllib3.connectionpool.ProxyManager`"
|
|
|
|
++ instead, see :class:`urllib3.ProxyManager`
|
|
|
|
+
|
|
|
|
+ :param \\**conn_kw:
|
|
|
|
+ Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
|
|
|
|
+ :class:`urllib3.connection.HTTPSConnection` instances.
|
|
|
|
+ """
|
|
|
|
+
|
|
|
|
+ scheme = "http"
|
|
|
|
+ ConnectionCls = HTTPConnection
|
|
|
|
+@@ -176,16 +172,17 @@ class HTTPConnectionPool(ConnectionPool,
|
|
|
|
+ strict=False,
|
|
|
|
+ timeout=Timeout.DEFAULT_TIMEOUT,
|
|
|
|
+ maxsize=1,
|
|
|
|
+ block=False,
|
|
|
|
+ headers=None,
|
|
|
|
+ retries=None,
|
|
|
|
+ _proxy=None,
|
|
|
|
+ _proxy_headers=None,
|
|
|
|
++ _proxy_config=None,
|
|
|
|
+ **conn_kw
|
|
|
|
+ ):
|
|
|
|
+ ConnectionPool.__init__(self, host, port)
|
|
|
|
+ RequestMethods.__init__(self, headers)
|
|
|
|
+
|
|
|
|
+ self.strict = strict
|
|
|
|
+
|
|
|
|
+ if not isinstance(timeout, Timeout):
|
|
|
|
+@@ -197,32 +194,36 @@ class HTTPConnectionPool(ConnectionPool,
|
|
|
|
+ self.timeout = timeout
|
|
|
|
+ self.retries = retries
|
|
|
|
+
|
|
|
|
+ self.pool = self.QueueCls(maxsize)
|
|
|
|
+ self.block = block
|
|
|
|
+
|
|
|
|
+ self.proxy = _proxy
|
|
|
|
+ self.proxy_headers = _proxy_headers or {}
|
|
|
|
++ self.proxy_config = _proxy_config
|
|
|
|
+
|
|
|
|
+ # Fill the queue up so that doing get() on it will block properly
|
|
|
|
+ for _ in xrange(maxsize):
|
|
|
|
+ self.pool.put(None)
|
|
|
|
+
|
|
|
|
+ # These are mostly for testing and debugging purposes.
|
|
|
|
+ self.num_connections = 0
|
|
|
|
+ self.num_requests = 0
|
|
|
|
+ self.conn_kw = conn_kw
|
|
|
|
+
|
|
|
|
+ if self.proxy:
|
|
|
|
+ # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
|
|
|
|
+ # We cannot know if the user has added default socket options, so we cannot replace the
|
|
|
|
+ # list.
|
|
|
|
+ self.conn_kw.setdefault("socket_options", [])
|
|
|
|
+
|
|
|
|
++ self.conn_kw["proxy"] = self.proxy
|
|
|
|
++ self.conn_kw["proxy_config"] = self.proxy_config
|
|
|
|
++
|
|
|
|
+ def _new_conn(self):
|
|
|
|
+ """
|
|
|
|
+ Return a fresh :class:`HTTPConnection`.
|
|
|
|
+ """
|
|
|
|
+ self.num_connections += 1
|
|
|
|
+ log.debug(
|
|
|
|
+ "Starting new HTTP connection (%d): %s:%s",
|
|
|
|
+ self.num_connections,
|
|
|
|
+@@ -267,17 +268,17 @@ class HTTPConnectionPool(ConnectionPool,
|
|
|
|
+ pass # Oh well, we'll create a new connection then
|
|
|
|
+
|
|
|
|
+ # If this is a persistent connection, check if it got disconnected
|
|
|
|
+ if conn and is_connection_dropped(conn):
|
|
|
|
+ log.debug("Resetting dropped connection: %s", self.host)
|
|
|
|
+ conn.close()
|
|
|
|
+ if getattr(conn, "auto_open", 1) == 0:
|
|
|
|
+ # This is a proxied connection that has been mutated by
|
|
|
|
+- # httplib._tunnel() and cannot be reused (since it would
|
|
|
|
++ # http.client._tunnel() and cannot be reused (since it would
|
|
|
|
+ # attempt to bypass the proxy)
|
|
|
|
+ conn = None
|
|
|
|
+
|
|
|
|
+ return conn or self._new_conn()
|
|
|
|
+
|
|
|
|
+ def _put_conn(self, conn):
|
|
|
|
+ """
|
|
|
|
+ Put a connection back into the pool.
|
|
|
|
+@@ -379,22 +380,40 @@ class HTTPConnectionPool(ConnectionPool,
|
|
|
|
+ # Trigger any extra validation we need to do.
|
|
|
|
+ try:
|
|
|
|
+ self._validate_conn(conn)
|
|
|
|
+ except (SocketTimeout, BaseSSLError) as e:
|
|
|
|
+ # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
|
|
|
|
+ self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
|
|
|
|
+ raise
|
|
|
|
+
|
|
|
|
+- # conn.request() calls httplib.*.request, not the method in
|
|
|
|
++ # conn.request() calls http.client.*.request, not the method in
|
|
|
|
+ # urllib3.request. It also calls makefile (recv) on the socket.
|
|
|
|
+- if chunked:
|
|
|
|
+- conn.request_chunked(method, url, **httplib_request_kw)
|
|
|
|
+- else:
|
|
|
|
+- conn.request(method, url, **httplib_request_kw)
|
|
|
|
++ try:
|
|
|
|
++ if chunked:
|
|
|
|
++ conn.request_chunked(method, url, **httplib_request_kw)
|
|
|
|
++ else:
|
|
|
|
++ conn.request(method, url, **httplib_request_kw)
|
|
|
|
++
|
|
|
|
++ # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
|
|
|
|
++ # legitimately able to close the connection after sending a valid response.
|
|
|
|
++ # With this behaviour, the received response is still readable.
|
|
|
|
++ except BrokenPipeError:
|
|
|
|
++ # Python 3
|
|
|
|
++ pass
|
|
|
|
++ except IOError as e:
|
|
|
|
++ # Python 2 and macOS/Linux
|
|
|
|
++ # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS
|
|
|
|
++ # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
|
|
|
|
++ if e.errno not in {
|
|
|
|
++ errno.EPIPE,
|
|
|
|
++ errno.ESHUTDOWN,
|
|
|
|
++ errno.EPROTOTYPE,
|
|
|
|
++ }:
|
|
|
|
++ raise
|
|
|
|
+
|
|
|
|
+ # Reset the timeout for the recv() on the socket
|
|
|
|
+ read_timeout = timeout_obj.read_timeout
|
|
|
|
+
|
|
|
|
+ # App Engine doesn't have a sock attr
|
|
|
|
+ if getattr(conn, "sock", None):
|
|
|
|
+ # In Python 3 socket.py will catch EAGAIN and return None when you
|
|
|
|
+ # try and read into the file pointer created by http.client, which
|
|
|
|
+@@ -527,20 +546,22 @@ class HTTPConnectionPool(ConnectionPool,
|
|
|
|
+ `release_conn` will only behave as expected if
|
|
|
|
+ `preload_content=False` because we want to make
|
|
|
|
+ `preload_content=False` the default behaviour someday soon without
|
|
|
|
+ breaking backwards compatibility.
|
|
|
|
+
|
|
|
|
+ :param method:
|
|
|
|
+ HTTP request method (such as GET, POST, PUT, etc.)
|
|
|
|
+
|
|
|
|
++ :param url:
|
|
|
|
++ The URL to perform the request on.
|
|
|
|
++
|
|
|
|
+ :param body:
|
|
|
|
+- Data to send in the request body (useful for creating
|
|
|
|
+- POST requests, see HTTPConnectionPool.post_url for
|
|
|
|
+- more convenience).
|
|
|
|
++ Data to send in the request body, either :class:`str`, :class:`bytes`,
|
|
|
|
++ an iterable of :class:`str`/:class:`bytes`, or a file-like object.
|
|
|
|
+
|
|
|
|
+ :param headers:
|
|
|
|
+ Dictionary of custom headers to send, such as User-Agent,
|
|
|
|
+ If-None-Match, etc. If None, pool headers are used. If provided,
|
|
|
|
+ these headers completely replace any pool-specific headers.
|
|
|
|
+
|
|
|
|
+ :param retries:
|
|
|
|
+ Configure the number of retries to allow before raising a
|
|
|
|
+@@ -560,17 +581,17 @@ class HTTPConnectionPool(ConnectionPool,
|
|
|
|
+
|
|
|
|
+ :param redirect:
|
|
|
|
+ If True, automatically handle redirects (status codes 301, 302,
|
|
|
|
+ 303, 307, 308). Each redirect counts as a retry. Disabling retries
|
|
|
|
+ will disable redirect, too.
|
|
|
|
+
|
|
|
|
+ :param assert_same_host:
|
|
|
|
+ If ``True``, will make sure that the host of the pool requests is
|
|
|
|
+- consistent else will raise HostChangedError. When False, you can
|
|
|
|
++ consistent else will raise HostChangedError. When ``False``, you can
|
|
|
|
+ use the pool on an HTTP proxy and request foreign hosts.
|
|
|
|
+
|
|
|
|
+ :param timeout:
|
|
|
|
+ If specified, overrides the default timeout for this one
|
|
|
|
+ request. It may be a float (in seconds) or an instance of
|
|
|
|
+ :class:`urllib3.util.Timeout`.
|
|
|
|
+
|
|
|
|
+ :param pool_timeout:
|
|
|
|
+@@ -597,16 +618,20 @@ class HTTPConnectionPool(ConnectionPool,
|
|
|
|
+ Position to seek to in file-like body in the event of a retry or
|
|
|
|
+ redirect. Typically this won't need to be set because urllib3 will
|
|
|
|
+ auto-populate the value when needed.
|
|
|
|
+
|
|
|
|
+ :param \\**response_kw:
|
|
|
|
+ Additional parameters are passed to
|
|
|
|
+ :meth:`urllib3.response.HTTPResponse.from_httplib`
|
|
|
|
+ """
|
|
|
|
++
|
|
|
|
++ parsed_url = parse_url(url)
|
|
|
|
++ destination_scheme = parsed_url.scheme
|
|
|
|
++
|
|
|
|
+ if headers is None:
|
|
|
|
+ headers = self.headers
|
|
|
|
+
|
|
|
|
+ if not isinstance(retries, Retry):
|
|
|
|
+ retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
|
|
|
|
+
|
|
|
|
+ if release_conn is None:
|
|
|
|
+ release_conn = response_kw.get("preload_content", True)
|
|
|
|
+@@ -614,35 +639,39 @@ class HTTPConnectionPool(ConnectionPool,
|
|
|
|
+ # Check host
|
|
|
|
+ if assert_same_host and not self.is_same_host(url):
|
|
|
|
+ raise HostChangedError(self, url, retries)
|
|
|
|
+
|
|
|
|
+ # Ensure that the URL we're connecting to is properly encoded
|
|
|
|
+ if url.startswith("/"):
|
|
|
|
+ url = six.ensure_str(_encode_target(url))
|
|
|
|
+ else:
|
|
|
|
+- url = six.ensure_str(parse_url(url).url)
|
|
|
|
++ url = six.ensure_str(parsed_url.url)
|
|
|
|
+
|
|
|
|
+ conn = None
|
|
|
|
+
|
|
|
|
+ # Track whether `conn` needs to be released before
|
|
|
|
+ # returning/raising/recursing. Update this variable if necessary, and
|
|
|
|
+ # leave `release_conn` constant throughout the function. That way, if
|
|
|
|
+ # the function recurses, the original value of `release_conn` will be
|
|
|
|
+ # passed down into the recursive call, and its value will be respected.
|
|
|
|
+ #
|
|
|
|
+ # See issue #651 [1] for details.
|
|
|
|
+ #
|
|
|
|
+ # [1] <https://github.com/urllib3/urllib3/issues/651>
|
|
|
|
+ release_this_conn = release_conn
|
|
|
|
+
|
|
|
|
+- # Merge the proxy headers. Only do this in HTTP. We have to copy the
|
|
|
|
+- # headers dict so we can safely change it without those changes being
|
|
|
|
+- # reflected in anyone else's copy.
|
|
|
|
+- if self.scheme == "http":
|
|
|
|
++ http_tunnel_required = connection_requires_http_tunnel(
|
|
|
|
++ self.proxy, self.proxy_config, destination_scheme
|
|
|
|
++ )
|
|
|
|
++
|
|
|
|
++ # Merge the proxy headers. Only done when not using HTTP CONNECT. We
|
|
|
|
++ # have to copy the headers dict so we can safely change it without those
|
|
|
|
++ # changes being reflected in anyone else's copy.
|
|
|
|
++ if not http_tunnel_required:
|
|
|
|
+ headers = headers.copy()
|
|
|
|
+ headers.update(self.proxy_headers)
|
|
|
|
+
|
|
|
|
+ # Must keep the exception bound to a separate variable or else Python 3
|
|
|
|
+ # complains about UnboundLocalError.
|
|
|
|
+ err = None
|
|
|
|
+
|
|
|
|
+ # Keep track of whether we cleanly exited the except block. This
|
|
|
|
+@@ -658,17 +687,17 @@ class HTTPConnectionPool(ConnectionPool,
|
|
|
|
+ timeout_obj = self._get_timeout(timeout)
|
|
|
|
+ conn = self._get_conn(timeout=pool_timeout)
|
|
|
|
+
|
|
|
|
+ conn.timeout = timeout_obj.connect_timeout
|
|
|
|
+
|
|
|
|
+ is_new_proxy_conn = self.proxy is not None and not getattr(
|
|
|
|
+ conn, "sock", None
|
|
|
|
+ )
|
|
|
|
+- if is_new_proxy_conn:
|
|
|
|
++ if is_new_proxy_conn and http_tunnel_required:
|
|
|
|
+ self._prepare_proxy(conn)
|
|
|
|
+
|
|
|
|
+ # Make the request on the httplib connection object.
|
|
|
|
+ httplib_response = self._make_request(
|
|
|
|
+ conn,
|
|
|
|
+ method,
|
|
|
|
+ url,
|
|
|
|
+ timeout=timeout_obj,
|
|
|
|
+@@ -693,19 +722,21 @@ class HTTPConnectionPool(ConnectionPool,
|
|
|
|
+ connection=response_conn,
|
|
|
|
+ retries=retries,
|
|
|
|
+ **response_kw
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
+ # Everything went great!
|
|
|
|
+ clean_exit = True
|
|
|
|
+
|
|
|
|
+- except queue.Empty:
|
|
|
|
+- # Timed out by queue.
|
|
|
|
+- raise EmptyPoolError(self, "No pool connections are available.")
|
|
|
|
++ except EmptyPoolError:
|
|
|
|
++ # Didn't get a connection from the pool, no need to clean up
|
|
|
|
++ clean_exit = True
|
|
|
|
++ release_this_conn = False
|
|
|
|
++ raise
|
|
|
|
+
|
|
|
|
+ except (
|
|
|
|
+ TimeoutError,
|
|
|
|
+ HTTPException,
|
|
|
|
+ SocketError,
|
|
|
|
+ ProtocolError,
|
|
|
|
+ BaseSSLError,
|
|
|
|
+ SSLError,
|
|
|
|
+@@ -830,21 +861,17 @@ class HTTPConnectionPool(ConnectionPool,
|
|
|
|
+
|
|
|
|
+ return response
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class HTTPSConnectionPool(HTTPConnectionPool):
|
|
|
|
+ """
|
|
|
|
+ Same as :class:`.HTTPConnectionPool`, but HTTPS.
|
|
|
|
+
|
|
|
|
+- When Python is compiled with the :mod:`ssl` module, then
|
|
|
|
+- :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
|
|
|
|
+- instead of :class:`.HTTPSConnection`.
|
|
|
|
+-
|
|
|
|
+- :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
|
|
|
|
++ :class:`.HTTPSConnection` uses one of ``assert_fingerprint``,
|
|
|
|
+ ``assert_hostname`` and ``host`` in this order to verify connections.
|
|
|
|
+ If ``assert_hostname`` is False, no verification is done.
|
|
|
|
+
|
|
|
|
+ The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
|
|
|
|
+ ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl`
|
|
|
|
+ is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
|
|
|
|
+ the connection socket into an SSL socket.
|
|
|
|
+ """
|
|
|
|
+@@ -918,25 +945,32 @@ class HTTPSConnectionPool(HTTPConnection
|
|
|
|
+ assert_hostname=self.assert_hostname,
|
|
|
|
+ assert_fingerprint=self.assert_fingerprint,
|
|
|
|
+ )
|
|
|
|
+ conn.ssl_version = self.ssl_version
|
|
|
|
+ return conn
|
|
|
|
+
|
|
|
|
+ def _prepare_proxy(self, conn):
|
|
|
|
+ """
|
|
|
|
+- Establish tunnel connection early, because otherwise httplib
|
|
|
|
+- would improperly set Host: header to proxy's IP:port.
|
|
|
|
++ Establishes a tunnel connection through HTTP CONNECT.
|
|
|
|
++
|
|
|
|
++ Tunnel connection is established early because otherwise httplib would
|
|
|
|
++ improperly set Host: header to proxy's IP:port.
|
|
|
|
+ """
|
|
|
|
++
|
|
|
|
+ conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers)
|
|
|
|
++
|
|
|
|
++ if self.proxy.scheme == "https":
|
|
|
|
++ conn.tls_in_tls_required = True
|
|
|
|
++
|
|
|
|
+ conn.connect()
|
|
|
|
+
|
|
|
|
+ def _new_conn(self):
|
|
|
|
+ """
|
|
|
|
+- Return a fresh :class:`httplib.HTTPSConnection`.
|
|
|
|
++ Return a fresh :class:`http.client.HTTPSConnection`.
|
|
|
|
+ """
|
|
|
|
+ self.num_connections += 1
|
|
|
|
+ log.debug(
|
|
|
|
+ "Starting new HTTPS connection (%d): %s:%s",
|
|
|
|
+ self.num_connections,
|
|
|
|
+ self.host,
|
|
|
|
+ self.port or "443",
|
|
|
|
+ )
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/contrib/_securetransport/bindings.py b/third_party/python/urllib3/urllib3/contrib/_securetransport/bindings.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/contrib/_securetransport/bindings.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/contrib/_securetransport/bindings.py
|
|
|
|
+@@ -27,51 +27,70 @@ license and by oscrypto's:
|
|
|
|
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
|
|
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
|
|
|
|
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
|
|
|
+ DEALINGS IN THE SOFTWARE.
|
|
|
|
+ """
|
|
|
|
+ from __future__ import absolute_import
|
|
|
|
+
|
|
|
|
+ import platform
|
|
|
|
+-from ctypes.util import find_library
|
|
|
|
+ from ctypes import (
|
|
|
|
+- c_void_p,
|
|
|
|
++ CDLL,
|
|
|
|
++ CFUNCTYPE,
|
|
|
|
++ POINTER,
|
|
|
|
++ c_bool,
|
|
|
|
++ c_byte,
|
|
|
|
++ c_char_p,
|
|
|
|
+ c_int32,
|
|
|
|
+- c_char_p,
|
|
|
|
++ c_long,
|
|
|
|
+ c_size_t,
|
|
|
|
+- c_byte,
|
|
|
|
+ c_uint32,
|
|
|
|
+ c_ulong,
|
|
|
|
+- c_long,
|
|
|
|
+- c_bool,
|
|
|
|
++ c_void_p,
|
|
|
|
+ )
|
|
|
|
+-from ctypes import CDLL, POINTER, CFUNCTYPE
|
|
|
|
+-
|
|
|
|
++from ctypes.util import find_library
|
|
|
|
+
|
|
|
|
+-security_path = find_library("Security")
|
|
|
|
+-if not security_path:
|
|
|
|
+- raise ImportError("The library Security could not be found")
|
|
|
|
+-
|
|
|
|
++from urllib3.packages.six import raise_from
|
|
|
|
+
|
|
|
|
+-core_foundation_path = find_library("CoreFoundation")
|
|
|
|
+-if not core_foundation_path:
|
|
|
|
+- raise ImportError("The library CoreFoundation could not be found")
|
|
|
|
+-
|
|
|
|
++if platform.system() != "Darwin":
|
|
|
|
++ raise ImportError("Only macOS is supported")
|
|
|
|
+
|
|
|
|
+ version = platform.mac_ver()[0]
|
|
|
|
+ version_info = tuple(map(int, version.split(".")))
|
|
|
|
+ if version_info < (10, 8):
|
|
|
|
+ raise OSError(
|
|
|
|
+ "Only OS X 10.8 and newer are supported, not %s.%s"
|
|
|
|
+ % (version_info[0], version_info[1])
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
+-Security = CDLL(security_path, use_errno=True)
|
|
|
|
+-CoreFoundation = CDLL(core_foundation_path, use_errno=True)
|
|
|
|
++
|
|
|
|
++def load_cdll(name, macos10_16_path):
|
|
|
|
++ """Loads a CDLL by name, falling back to known path on 10.16+"""
|
|
|
|
++ try:
|
|
|
|
++ # Big Sur is technically 11 but we use 10.16 due to the Big Sur
|
|
|
|
++ # beta being labeled as 10.16.
|
|
|
|
++ if version_info >= (10, 16):
|
|
|
|
++ path = macos10_16_path
|
|
|
|
++ else:
|
|
|
|
++ path = find_library(name)
|
|
|
|
++ if not path:
|
|
|
|
++ raise OSError # Caught and reraised as 'ImportError'
|
|
|
|
++ return CDLL(path, use_errno=True)
|
|
|
|
++ except OSError:
|
|
|
|
++ raise_from(ImportError("The library %s failed to load" % name), None)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++Security = load_cdll(
|
|
|
|
++ "Security", "/System/Library/Frameworks/Security.framework/Security"
|
|
|
|
++)
|
|
|
|
++CoreFoundation = load_cdll(
|
|
|
|
++ "CoreFoundation",
|
|
|
|
++ "/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation",
|
|
|
|
++)
|
|
|
|
++
|
|
|
|
+
|
|
|
|
+ Boolean = c_bool
|
|
|
|
+ CFIndex = c_long
|
|
|
|
+ CFStringEncoding = c_uint32
|
|
|
|
+ CFData = c_void_p
|
|
|
|
+ CFString = c_void_p
|
|
|
|
+ CFArray = c_void_p
|
|
|
|
+ CFMutableArray = c_void_p
|
|
|
|
+@@ -271,16 +290,23 @@ try:
|
|
|
|
+ Security.SSLSetSessionOption.restype = OSStatus
|
|
|
|
+
|
|
|
|
+ Security.SSLSetProtocolVersionMin.argtypes = [SSLContextRef, SSLProtocol]
|
|
|
|
+ Security.SSLSetProtocolVersionMin.restype = OSStatus
|
|
|
|
+
|
|
|
|
+ Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol]
|
|
|
|
+ Security.SSLSetProtocolVersionMax.restype = OSStatus
|
|
|
|
+
|
|
|
|
++ try:
|
|
|
|
++ Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef]
|
|
|
|
++ Security.SSLSetALPNProtocols.restype = OSStatus
|
|
|
|
++ except AttributeError:
|
|
|
|
++ # Supported only in 10.12+
|
|
|
|
++ pass
|
|
|
|
++
|
|
|
|
+ Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
|
|
|
|
+ Security.SecCopyErrorMessageString.restype = CFStringRef
|
|
|
|
+
|
|
|
|
+ Security.SSLReadFunc = SSLReadFunc
|
|
|
|
+ Security.SSLWriteFunc = SSLWriteFunc
|
|
|
|
+ Security.SSLContextRef = SSLContextRef
|
|
|
|
+ Security.SSLProtocol = SSLProtocol
|
|
|
|
+ Security.SSLCipherSuite = SSLCipherSuite
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/contrib/_securetransport/low_level.py b/third_party/python/urllib3/urllib3/contrib/_securetransport/low_level.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/contrib/_securetransport/low_level.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/contrib/_securetransport/low_level.py
|
|
|
|
+@@ -5,23 +5,23 @@ These are Python functions that are not
|
|
|
|
+ but are necessary to get them to work. They include a whole bunch of low-level
|
|
|
|
+ CoreFoundation messing about and memory management. The concerns in this module
|
|
|
|
+ are almost entirely about trying to avoid memory leaks and providing
|
|
|
|
+ appropriate and useful assistance to the higher-level code.
|
|
|
|
+ """
|
|
|
|
+ import base64
|
|
|
|
+ import ctypes
|
|
|
|
+ import itertools
|
|
|
|
++import os
|
|
|
|
+ import re
|
|
|
|
+-import os
|
|
|
|
+ import ssl
|
|
|
|
++import struct
|
|
|
|
+ import tempfile
|
|
|
|
+
|
|
|
|
+-from .bindings import Security, CoreFoundation, CFConst
|
|
|
|
+-
|
|
|
|
++from .bindings import CFConst, CoreFoundation, Security
|
|
|
|
+
|
|
|
|
+ # This regular expression is used to grab PEM data out of a PEM bundle.
|
|
|
|
+ _PEM_CERTS_RE = re.compile(
|
|
|
|
+ b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ def _cf_data_from_bytes(bytestring):
|
|
|
|
+@@ -51,16 +51,61 @@ def _cf_dictionary_from_tuples(tuples):
|
|
|
|
+ cf_keys,
|
|
|
|
+ cf_values,
|
|
|
|
+ dictionary_size,
|
|
|
|
+ CoreFoundation.kCFTypeDictionaryKeyCallBacks,
|
|
|
|
+ CoreFoundation.kCFTypeDictionaryValueCallBacks,
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
+
|
|
|
|
++def _cfstr(py_bstr):
|
|
|
|
++ """
|
|
|
|
++ Given a Python binary data, create a CFString.
|
|
|
|
++ The string must be CFReleased by the caller.
|
|
|
|
++ """
|
|
|
|
++ c_str = ctypes.c_char_p(py_bstr)
|
|
|
|
++ cf_str = CoreFoundation.CFStringCreateWithCString(
|
|
|
|
++ CoreFoundation.kCFAllocatorDefault,
|
|
|
|
++ c_str,
|
|
|
|
++ CFConst.kCFStringEncodingUTF8,
|
|
|
|
++ )
|
|
|
|
++ return cf_str
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++def _create_cfstring_array(lst):
|
|
|
|
++ """
|
|
|
|
++ Given a list of Python binary data, create an associated CFMutableArray.
|
|
|
|
++ The array must be CFReleased by the caller.
|
|
|
|
++
|
|
|
|
++ Raises an ssl.SSLError on failure.
|
|
|
|
++ """
|
|
|
|
++ cf_arr = None
|
|
|
|
++ try:
|
|
|
|
++ cf_arr = CoreFoundation.CFArrayCreateMutable(
|
|
|
|
++ CoreFoundation.kCFAllocatorDefault,
|
|
|
|
++ 0,
|
|
|
|
++ ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
|
|
|
|
++ )
|
|
|
|
++ if not cf_arr:
|
|
|
|
++ raise MemoryError("Unable to allocate memory!")
|
|
|
|
++ for item in lst:
|
|
|
|
++ cf_str = _cfstr(item)
|
|
|
|
++ if not cf_str:
|
|
|
|
++ raise MemoryError("Unable to allocate memory!")
|
|
|
|
++ try:
|
|
|
|
++ CoreFoundation.CFArrayAppendValue(cf_arr, cf_str)
|
|
|
|
++ finally:
|
|
|
|
++ CoreFoundation.CFRelease(cf_str)
|
|
|
|
++ except BaseException as e:
|
|
|
|
++ if cf_arr:
|
|
|
|
++ CoreFoundation.CFRelease(cf_arr)
|
|
|
|
++ raise ssl.SSLError("Unable to allocate array: %s" % (e,))
|
|
|
|
++ return cf_arr
|
|
|
|
++
|
|
|
|
++
|
|
|
|
+ def _cf_string_to_unicode(value):
|
|
|
|
+ """
|
|
|
|
+ Creates a Unicode string from a CFString object. Used entirely for error
|
|
|
|
+ reporting.
|
|
|
|
+
|
|
|
|
+ Yes, it annoys me quite a lot that this function is this complex.
|
|
|
|
+ """
|
|
|
|
+ value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p))
|
|
|
|
+@@ -321,8 +366,31 @@ def _load_client_cert_chain(keychain, *p
|
|
|
|
+ # ArrayAppendValue does a CFRetain on the item. That's fine,
|
|
|
|
+ # because the finally block will release our other refs to them.
|
|
|
|
+ CoreFoundation.CFArrayAppendValue(trust_chain, item)
|
|
|
|
+
|
|
|
|
+ return trust_chain
|
|
|
|
+ finally:
|
|
|
|
+ for obj in itertools.chain(identities, certificates):
|
|
|
|
+ CoreFoundation.CFRelease(obj)
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++TLS_PROTOCOL_VERSIONS = {
|
|
|
|
++ "SSLv2": (0, 2),
|
|
|
|
++ "SSLv3": (3, 0),
|
|
|
|
++ "TLSv1": (3, 1),
|
|
|
|
++ "TLSv1.1": (3, 2),
|
|
|
|
++ "TLSv1.2": (3, 3),
|
|
|
|
++}
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++def _build_tls_unknown_ca_alert(version):
|
|
|
|
++ """
|
|
|
|
++ Builds a TLS alert record for an unknown CA.
|
|
|
|
++ """
|
|
|
|
++ ver_maj, ver_min = TLS_PROTOCOL_VERSIONS[version]
|
|
|
|
++ severity_fatal = 0x02
|
|
|
|
++ description_unknown_ca = 0x30
|
|
|
|
++ msg = struct.pack(">BB", severity_fatal, description_unknown_ca)
|
|
|
|
++ msg_len = len(msg)
|
|
|
|
++ record_type_alert = 0x15
|
|
|
|
++ record = struct.pack(">BBBH", record_type_alert, ver_maj, ver_min, msg_len) + msg
|
|
|
|
++ return record
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/contrib/appengine.py b/third_party/python/urllib3/urllib3/contrib/appengine.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/contrib/appengine.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/contrib/appengine.py
|
|
|
|
+@@ -34,34 +34,34 @@ 2. You can use a normal :class:`~urllib3
|
|
|
|
+ GAE_USE_SOCKETS_HTTPLIB : 'true'
|
|
|
|
+
|
|
|
|
+ 3. If you are using `App Engine Flexible
|
|
|
|
+ <https://cloud.google.com/appengine/docs/flexible/>`_, you can use the standard
|
|
|
|
+ :class:`PoolManager` without any configuration or special environment variables.
|
|
|
|
+ """
|
|
|
|
+
|
|
|
|
+ from __future__ import absolute_import
|
|
|
|
++
|
|
|
|
+ import io
|
|
|
|
+ import logging
|
|
|
|
+ import warnings
|
|
|
|
+-from ..packages.six.moves.urllib.parse import urljoin
|
|
|
|
+
|
|
|
|
+ from ..exceptions import (
|
|
|
|
+ HTTPError,
|
|
|
|
+ HTTPWarning,
|
|
|
|
+ MaxRetryError,
|
|
|
|
+ ProtocolError,
|
|
|
|
++ SSLError,
|
|
|
|
+ TimeoutError,
|
|
|
|
+- SSLError,
|
|
|
|
+ )
|
|
|
|
+-
|
|
|
|
++from ..packages.six.moves.urllib.parse import urljoin
|
|
|
|
+ from ..request import RequestMethods
|
|
|
|
+ from ..response import HTTPResponse
|
|
|
|
++from ..util.retry import Retry
|
|
|
|
+ from ..util.timeout import Timeout
|
|
|
|
+-from ..util.retry import Retry
|
|
|
|
+ from . import _appengine_environ
|
|
|
|
+
|
|
|
|
+ try:
|
|
|
|
+ from google.appengine.api import urlfetch
|
|
|
|
+ except ImportError:
|
|
|
|
+ urlfetch = None
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+@@ -85,17 +85,17 @@ class AppEngineManager(RequestMethods):
|
|
|
|
+ the App Engine documentation `here
|
|
|
|
+ <https://cloud.google.com/appengine/docs/python/urlfetch>`_.
|
|
|
|
+
|
|
|
|
+ Notably it will raise an :class:`AppEnginePlatformError` if:
|
|
|
|
+ * URLFetch is not available.
|
|
|
|
+ * If you attempt to use this on App Engine Flexible, as full socket
|
|
|
|
+ support is available.
|
|
|
|
+ * If a request size is more than 10 megabytes.
|
|
|
|
+- * If a response size is more than 32 megabtyes.
|
|
|
|
++ * If a response size is more than 32 megabytes.
|
|
|
|
+ * If you use an unsupported request method such as OPTIONS.
|
|
|
|
+
|
|
|
|
+ Beyond those cases, it will raise normal urllib3 errors.
|
|
|
|
+ """
|
|
|
|
+
|
|
|
|
+ def __init__(
|
|
|
|
+ self,
|
|
|
|
+ headers=None,
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/contrib/ntlmpool.py b/third_party/python/urllib3/urllib3/contrib/ntlmpool.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/contrib/ntlmpool.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/contrib/ntlmpool.py
|
|
|
|
+@@ -1,22 +1,22 @@
|
|
|
|
+ """
|
|
|
|
+ NTLM authenticating pool, contributed by erikcederstran
|
|
|
|
+
|
|
|
|
+ Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
|
|
|
|
+ """
|
|
|
|
+ from __future__ import absolute_import
|
|
|
|
+
|
|
|
|
+ from logging import getLogger
|
|
|
|
++
|
|
|
|
+ from ntlm import ntlm
|
|
|
|
+
|
|
|
|
+ from .. import HTTPSConnectionPool
|
|
|
|
+ from ..packages.six.moves.http_client import HTTPSConnection
|
|
|
|
+
|
|
|
|
+-
|
|
|
|
+ log = getLogger(__name__)
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class NTLMConnectionPool(HTTPSConnectionPool):
|
|
|
|
+ """
|
|
|
|
+ Implements an NTLM authentication version of an urllib3 connection pool
|
|
|
|
+ """
|
|
|
|
+
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/contrib/pyopenssl.py b/third_party/python/urllib3/urllib3/contrib/pyopenssl.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/contrib/pyopenssl.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/contrib/pyopenssl.py
|
|
|
|
+@@ -1,82 +1,86 @@
|
|
|
|
+ """
|
|
|
|
+-SSL with SNI_-support for Python 2. Follow these instructions if you would
|
|
|
|
+-like to verify SSL certificates in Python 2. Note, the default libraries do
|
|
|
|
++TLS with SNI_-support for Python 2. Follow these instructions if you would
|
|
|
|
++like to verify TLS certificates in Python 2. Note, the default libraries do
|
|
|
|
+ *not* do certificate checking; you need to do additional work to validate
|
|
|
|
+ certificates yourself.
|
|
|
|
+
|
|
|
|
+ This needs the following packages installed:
|
|
|
|
+
|
|
|
|
+-* pyOpenSSL (tested with 16.0.0)
|
|
|
|
+-* cryptography (minimum 1.3.4, from pyopenssl)
|
|
|
|
+-* idna (minimum 2.0, from cryptography)
|
|
|
|
++* `pyOpenSSL`_ (tested with 16.0.0)
|
|
|
|
++* `cryptography`_ (minimum 1.3.4, from pyopenssl)
|
|
|
|
++* `idna`_ (minimum 2.0, from cryptography)
|
|
|
|
+
|
|
|
|
+ However, pyopenssl depends on cryptography, which depends on idna, so while we
|
|
|
|
+ use all three directly here we end up having relatively few packages required.
|
|
|
|
+
|
|
|
|
+ You can install them with the following command:
|
|
|
|
+
|
|
|
|
+- pip install pyopenssl cryptography idna
|
|
|
|
++.. code-block:: bash
|
|
|
|
++
|
|
|
|
++ $ python -m pip install pyopenssl cryptography idna
|
|
|
|
+
|
|
|
|
+ To activate certificate checking, call
|
|
|
|
+ :func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
|
|
|
|
+ before you begin making HTTP requests. This can be done in a ``sitecustomize``
|
|
|
|
+ module, or at any other time before your application begins using ``urllib3``,
|
|
|
|
+-like this::
|
|
|
|
++like this:
|
|
|
|
++
|
|
|
|
++.. code-block:: python
|
|
|
|
+
|
|
|
|
+ try:
|
|
|
|
+ import urllib3.contrib.pyopenssl
|
|
|
|
+ urllib3.contrib.pyopenssl.inject_into_urllib3()
|
|
|
|
+ except ImportError:
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+ Now you can use :mod:`urllib3` as you normally would, and it will support SNI
|
|
|
|
+ when the required modules are installed.
|
|
|
|
+
|
|
|
|
+ Activating this module also has the positive side effect of disabling SSL/TLS
|
|
|
|
+ compression in Python 2 (see `CRIME attack`_).
|
|
|
|
+
|
|
|
|
+-If you want to configure the default list of supported cipher suites, you can
|
|
|
|
+-set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.
|
|
|
|
+-
|
|
|
|
+ .. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
|
|
|
|
+ .. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
|
|
|
|
++.. _pyopenssl: https://www.pyopenssl.org
|
|
|
|
++.. _cryptography: https://cryptography.io
|
|
|
|
++.. _idna: https://github.com/kjd/idna
|
|
|
|
+ """
|
|
|
|
+ from __future__ import absolute_import
|
|
|
|
+
|
|
|
|
+ import OpenSSL.SSL
|
|
|
|
+ from cryptography import x509
|
|
|
|
+ from cryptography.hazmat.backends.openssl import backend as openssl_backend
|
|
|
|
+ from cryptography.hazmat.backends.openssl.x509 import _Certificate
|
|
|
|
+
|
|
|
|
+ try:
|
|
|
|
+ from cryptography.x509 import UnsupportedExtension
|
|
|
|
+ except ImportError:
|
|
|
|
+ # UnsupportedExtension is gone in cryptography >= 2.1.0
|
|
|
|
+ class UnsupportedExtension(Exception):
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+-from socket import timeout, error as SocketError
|
|
|
|
+ from io import BytesIO
|
|
|
|
++from socket import error as SocketError
|
|
|
|
++from socket import timeout
|
|
|
|
+
|
|
|
|
+ try: # Platform-specific: Python 2
|
|
|
|
+ from socket import _fileobject
|
|
|
|
+ except ImportError: # Platform-specific: Python 3
|
|
|
|
+ _fileobject = None
|
|
|
|
+ from ..packages.backports.makefile import backport_makefile
|
|
|
|
+
|
|
|
|
+ import logging
|
|
|
|
+ import ssl
|
|
|
|
+-from ..packages import six
|
|
|
|
+ import sys
|
|
|
|
+
|
|
|
|
+ from .. import util
|
|
|
|
+-
|
|
|
|
++from ..packages import six
|
|
|
|
+
|
|
|
|
+ __all__ = ["inject_into_urllib3", "extract_from_urllib3"]
|
|
|
|
+
|
|
|
|
+ # SNI always works.
|
|
|
|
+ HAS_SNI = True
|
|
|
|
+
|
|
|
|
+ # Map from urllib3 to PyOpenSSL compatible parameter-values.
|
|
|
|
+ _openssl_versions = {
|
|
|
|
+@@ -460,16 +464,20 @@ class PyOpenSSLContext(object):
|
|
|
|
+ def load_cert_chain(self, certfile, keyfile=None, password=None):
|
|
|
|
+ self._ctx.use_certificate_chain_file(certfile)
|
|
|
|
+ if password is not None:
|
|
|
|
+ if not isinstance(password, six.binary_type):
|
|
|
|
+ password = password.encode("utf-8")
|
|
|
|
+ self._ctx.set_passwd_cb(lambda *_: password)
|
|
|
|
+ self._ctx.use_privatekey_file(keyfile or certfile)
|
|
|
|
+
|
|
|
|
++ def set_alpn_protocols(self, protocols):
|
|
|
|
++ protocols = [six.ensure_binary(p) for p in protocols]
|
|
|
|
++ return self._ctx.set_alpn_protos(protocols)
|
|
|
|
++
|
|
|
|
+ def wrap_socket(
|
|
|
|
+ self,
|
|
|
|
+ sock,
|
|
|
|
+ server_side=False,
|
|
|
|
+ do_handshake_on_connect=True,
|
|
|
|
+ suppress_ragged_eofs=True,
|
|
|
|
+ server_hostname=None,
|
|
|
|
+ ):
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/contrib/securetransport.py b/third_party/python/urllib3/urllib3/contrib/securetransport.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/contrib/securetransport.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/contrib/securetransport.py
|
|
|
|
+@@ -24,16 +24,18 @@ To use this module, simply import and in
|
|
|
|
+
|
|
|
|
+ Happy TLSing!
|
|
|
|
+
|
|
|
|
+ This code is a bastardised version of the code found in Will Bond's oscrypto
|
|
|
|
+ library. An enormous debt is owed to him for blazing this trail for us. For
|
|
|
|
+ that reason, this code should be considered to be covered both by urllib3's
|
|
|
|
+ license and by oscrypto's:
|
|
|
|
+
|
|
|
|
++.. code-block::
|
|
|
|
++
|
|
|
|
+ Copyright (c) 2015-2016 Will Bond <will@wbond.net>
|
|
|
|
+
|
|
|
|
+ Permission is hereby granted, free of charge, to any person obtaining a
|
|
|
|
+ copy of this software and associated documentation files (the "Software"),
|
|
|
|
+ to deal in the Software without restriction, including without limitation
|
|
|
|
+ the rights to use, copy, modify, merge, publish, distribute, sublicense,
|
|
|
|
+ and/or sell copies of the Software, and to permit persons to whom the
|
|
|
|
+ Software is furnished to do so, subject to the following conditions:
|
|
|
|
+@@ -53,26 +55,31 @@ from __future__ import absolute_import
|
|
|
|
+
|
|
|
|
+ import contextlib
|
|
|
|
+ import ctypes
|
|
|
|
+ import errno
|
|
|
|
+ import os.path
|
|
|
|
+ import shutil
|
|
|
|
+ import socket
|
|
|
|
+ import ssl
|
|
|
|
++import struct
|
|
|
|
+ import threading
|
|
|
|
+ import weakref
|
|
|
|
+
|
|
|
|
++import six
|
|
|
|
++
|
|
|
|
+ from .. import util
|
|
|
|
+-from ._securetransport.bindings import Security, SecurityConst, CoreFoundation
|
|
|
|
++from ._securetransport.bindings import CoreFoundation, Security, SecurityConst
|
|
|
|
+ from ._securetransport.low_level import (
|
|
|
|
+ _assert_no_error,
|
|
|
|
++ _build_tls_unknown_ca_alert,
|
|
|
|
+ _cert_array_from_pem,
|
|
|
|
++ _create_cfstring_array,
|
|
|
|
++ _load_client_cert_chain,
|
|
|
|
+ _temporary_keychain,
|
|
|
|
+- _load_client_cert_chain,
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
+ try: # Platform-specific: Python 2
|
|
|
|
+ from socket import _fileobject
|
|
|
|
+ except ImportError: # Platform-specific: Python 3
|
|
|
|
+ _fileobject = None
|
|
|
|
+ from ..packages.backports.makefile import backport_makefile
|
|
|
|
+
|
|
|
|
+@@ -369,26 +376,65 @@ class WrappedSocket(object):
|
|
|
|
+ OpenSSL cipher strings is going to be a freaking nightmare.
|
|
|
|
+ """
|
|
|
|
+ ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES)
|
|
|
|
+ result = Security.SSLSetEnabledCiphers(
|
|
|
|
+ self.context, ciphers, len(CIPHER_SUITES)
|
|
|
|
+ )
|
|
|
|
+ _assert_no_error(result)
|
|
|
|
+
|
|
|
|
++ def _set_alpn_protocols(self, protocols):
|
|
|
|
++ """
|
|
|
|
++ Sets up the ALPN protocols on the context.
|
|
|
|
++ """
|
|
|
|
++ if not protocols:
|
|
|
|
++ return
|
|
|
|
++ protocols_arr = _create_cfstring_array(protocols)
|
|
|
|
++ try:
|
|
|
|
++ result = Security.SSLSetALPNProtocols(self.context, protocols_arr)
|
|
|
|
++ _assert_no_error(result)
|
|
|
|
++ finally:
|
|
|
|
++ CoreFoundation.CFRelease(protocols_arr)
|
|
|
|
++
|
|
|
|
+ def _custom_validate(self, verify, trust_bundle):
|
|
|
|
+ """
|
|
|
|
+ Called when we have set custom validation. We do this in two cases:
|
|
|
|
+ first, when cert validation is entirely disabled; and second, when
|
|
|
|
+ using a custom trust DB.
|
|
|
|
++ Raises an SSLError if the connection is not trusted.
|
|
|
|
+ """
|
|
|
|
+ # If we disabled cert validation, just say: cool.
|
|
|
|
+ if not verify:
|
|
|
|
+ return
|
|
|
|
+
|
|
|
|
++ successes = (
|
|
|
|
++ SecurityConst.kSecTrustResultUnspecified,
|
|
|
|
++ SecurityConst.kSecTrustResultProceed,
|
|
|
|
++ )
|
|
|
|
++ try:
|
|
|
|
++ trust_result = self._evaluate_trust(trust_bundle)
|
|
|
|
++ if trust_result in successes:
|
|
|
|
++ return
|
|
|
|
++ reason = "error code: %d" % (trust_result,)
|
|
|
|
++ except Exception as e:
|
|
|
|
++ # Do not trust on error
|
|
|
|
++ reason = "exception: %r" % (e,)
|
|
|
|
++
|
|
|
|
++ # SecureTransport does not send an alert nor shuts down the connection.
|
|
|
|
++ rec = _build_tls_unknown_ca_alert(self.version())
|
|
|
|
++ self.socket.sendall(rec)
|
|
|
|
++ # close the connection immediately
|
|
|
|
++ # l_onoff = 1, activate linger
|
|
|
|
++ # l_linger = 0, linger for 0 seconds
|
|
|
|
++ opts = struct.pack("ii", 1, 0)
|
|
|
|
++ self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts)
|
|
|
|
++ self.close()
|
|
|
|
++ raise ssl.SSLError("certificate verify failed, %s" % reason)
|
|
|
|
++
|
|
|
|
++ def _evaluate_trust(self, trust_bundle):
|
|
|
|
+ # We want data in memory, so load it up.
|
|
|
|
+ if os.path.isfile(trust_bundle):
|
|
|
|
+ with open(trust_bundle, "rb") as f:
|
|
|
|
+ trust_bundle = f.read()
|
|
|
|
+
|
|
|
|
+ cert_array = None
|
|
|
|
+ trust = Security.SecTrustRef()
|
|
|
|
+
|
|
|
|
+@@ -416,36 +462,29 @@ class WrappedSocket(object):
|
|
|
|
+ _assert_no_error(result)
|
|
|
|
+ finally:
|
|
|
|
+ if trust:
|
|
|
|
+ CoreFoundation.CFRelease(trust)
|
|
|
|
+
|
|
|
|
+ if cert_array is not None:
|
|
|
|
+ CoreFoundation.CFRelease(cert_array)
|
|
|
|
+
|
|
|
|
+- # Ok, now we can look at what the result was.
|
|
|
|
+- successes = (
|
|
|
|
+- SecurityConst.kSecTrustResultUnspecified,
|
|
|
|
+- SecurityConst.kSecTrustResultProceed,
|
|
|
|
+- )
|
|
|
|
+- if trust_result.value not in successes:
|
|
|
|
+- raise ssl.SSLError(
|
|
|
|
+- "certificate verify failed, error code: %d" % trust_result.value
|
|
|
|
+- )
|
|
|
|
++ return trust_result.value
|
|
|
|
+
|
|
|
|
+ def handshake(
|
|
|
|
+ self,
|
|
|
|
+ server_hostname,
|
|
|
|
+ verify,
|
|
|
|
+ trust_bundle,
|
|
|
|
+ min_version,
|
|
|
|
+ max_version,
|
|
|
|
+ client_cert,
|
|
|
|
+ client_key,
|
|
|
|
+ client_key_passphrase,
|
|
|
|
++ alpn_protocols,
|
|
|
|
+ ):
|
|
|
|
+ """
|
|
|
|
+ Actually performs the TLS handshake. This is run automatically by
|
|
|
|
+ wrapped socket, and shouldn't be needed in user code.
|
|
|
|
+ """
|
|
|
|
+ # First, we do the initial bits of connection setup. We need to create
|
|
|
|
+ # a context, set its I/O funcs, and set the connection reference.
|
|
|
|
+ self.context = Security.SSLCreateContext(
|
|
|
|
+@@ -476,16 +515,19 @@ class WrappedSocket(object):
|
|
|
|
+ result = Security.SSLSetPeerDomainName(
|
|
|
|
+ self.context, server_hostname, len(server_hostname)
|
|
|
|
+ )
|
|
|
|
+ _assert_no_error(result)
|
|
|
|
+
|
|
|
|
+ # Setup the ciphers.
|
|
|
|
+ self._set_ciphers()
|
|
|
|
+
|
|
|
|
++ # Setup the ALPN protocols.
|
|
|
|
++ self._set_alpn_protocols(alpn_protocols)
|
|
|
|
++
|
|
|
|
+ # Set the minimum and maximum TLS versions.
|
|
|
|
+ result = Security.SSLSetProtocolVersionMin(self.context, min_version)
|
|
|
|
+ _assert_no_error(result)
|
|
|
|
+
|
|
|
|
+ result = Security.SSLSetProtocolVersionMax(self.context, max_version)
|
|
|
|
+ _assert_no_error(result)
|
|
|
|
+
|
|
|
|
+ # If there's a trust DB, we need to use it. We do that by telling
|
|
|
|
+@@ -749,16 +791,17 @@ class SecureTransportContext(object):
|
|
|
|
+ def __init__(self, protocol):
|
|
|
|
+ self._min_version, self._max_version = _protocol_to_min_max[protocol]
|
|
|
|
+ self._options = 0
|
|
|
|
+ self._verify = False
|
|
|
|
+ self._trust_bundle = None
|
|
|
|
+ self._client_cert = None
|
|
|
|
+ self._client_key = None
|
|
|
|
+ self._client_key_passphrase = None
|
|
|
|
++ self._alpn_protocols = None
|
|
|
|
+
|
|
|
|
+ @property
|
|
|
|
+ def check_hostname(self):
|
|
|
|
+ """
|
|
|
|
+ SecureTransport cannot have its hostname checking disabled. For more,
|
|
|
|
+ see the comment on getpeercert() in this file.
|
|
|
|
+ """
|
|
|
|
+ return True
|
|
|
|
+@@ -826,16 +869,28 @@ class SecureTransportContext(object):
|
|
|
|
+
|
|
|
|
+ self._trust_bundle = cafile or cadata
|
|
|
|
+
|
|
|
|
+ def load_cert_chain(self, certfile, keyfile=None, password=None):
|
|
|
|
+ self._client_cert = certfile
|
|
|
|
+ self._client_key = keyfile
|
|
|
|
+ self._client_cert_passphrase = password
|
|
|
|
+
|
|
|
|
++ def set_alpn_protocols(self, protocols):
|
|
|
|
++ """
|
|
|
|
++ Sets the ALPN protocols that will later be set on the context.
|
|
|
|
++
|
|
|
|
++ Raises a NotImplementedError if ALPN is not supported.
|
|
|
|
++ """
|
|
|
|
++ if not hasattr(Security, "SSLSetALPNProtocols"):
|
|
|
|
++ raise NotImplementedError(
|
|
|
|
++ "SecureTransport supports ALPN only in macOS 10.12+"
|
|
|
|
++ )
|
|
|
|
++ self._alpn_protocols = [six.ensure_binary(p) for p in protocols]
|
|
|
|
++
|
|
|
|
+ def wrap_socket(
|
|
|
|
+ self,
|
|
|
|
+ sock,
|
|
|
|
+ server_side=False,
|
|
|
|
+ do_handshake_on_connect=True,
|
|
|
|
+ suppress_ragged_eofs=True,
|
|
|
|
+ server_hostname=None,
|
|
|
|
+ ):
|
|
|
|
+@@ -855,10 +910,11 @@ class SecureTransportContext(object):
|
|
|
|
+ server_hostname,
|
|
|
|
+ self._verify,
|
|
|
|
+ self._trust_bundle,
|
|
|
|
+ self._min_version,
|
|
|
|
+ self._max_version,
|
|
|
|
+ self._client_cert,
|
|
|
|
+ self._client_key,
|
|
|
|
+ self._client_key_passphrase,
|
|
|
|
++ self._alpn_protocols,
|
|
|
|
+ )
|
|
|
|
+ return wrapped_socket
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/contrib/socks.py b/third_party/python/urllib3/urllib3/contrib/socks.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/contrib/socks.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/contrib/socks.py
|
|
|
|
+@@ -9,55 +9,61 @@ The SOCKS implementation supports the fu
|
|
|
|
+ supports the following SOCKS features:
|
|
|
|
+
|
|
|
|
+ - SOCKS4A (``proxy_url='socks4a://...``)
|
|
|
|
+ - SOCKS4 (``proxy_url='socks4://...``)
|
|
|
|
+ - SOCKS5 with remote DNS (``proxy_url='socks5h://...``)
|
|
|
|
+ - SOCKS5 with local DNS (``proxy_url='socks5://...``)
|
|
|
|
+ - Usernames and passwords for the SOCKS proxy
|
|
|
|
+
|
|
|
|
+- .. note::
|
|
|
|
+- It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
|
|
|
|
+- your ``proxy_url`` to ensure that DNS resolution is done from the remote
|
|
|
|
+- server instead of client-side when connecting to a domain name.
|
|
|
|
++.. note::
|
|
|
|
++ It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
|
|
|
|
++ your ``proxy_url`` to ensure that DNS resolution is done from the remote
|
|
|
|
++ server instead of client-side when connecting to a domain name.
|
|
|
|
+
|
|
|
|
+ SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5
|
|
|
|
+ supports IPv4, IPv6, and domain names.
|
|
|
|
+
|
|
|
|
+ When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url``
|
|
|
|
+-will be sent as the ``userid`` section of the SOCKS request::
|
|
|
|
++will be sent as the ``userid`` section of the SOCKS request:
|
|
|
|
++
|
|
|
|
++.. code-block:: python
|
|
|
|
+
|
|
|
|
+ proxy_url="socks4a://<userid>@proxy-host"
|
|
|
|
+
|
|
|
|
+ When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion
|
|
|
|
+ of the ``proxy_url`` will be sent as the username/password to authenticate
|
|
|
|
+-with the proxy::
|
|
|
|
++with the proxy:
|
|
|
|
++
|
|
|
|
++.. code-block:: python
|
|
|
|
+
|
|
|
|
+ proxy_url="socks5h://<username>:<password>@proxy-host"
|
|
|
|
+
|
|
|
|
+ """
|
|
|
|
+ from __future__ import absolute_import
|
|
|
|
+
|
|
|
|
+ try:
|
|
|
|
+ import socks
|
|
|
|
+ except ImportError:
|
|
|
|
+ import warnings
|
|
|
|
++
|
|
|
|
+ from ..exceptions import DependencyWarning
|
|
|
|
+
|
|
|
|
+ warnings.warn(
|
|
|
|
+ (
|
|
|
|
+ "SOCKS support in urllib3 requires the installation of optional "
|
|
|
|
+ "dependencies: specifically, PySocks. For more information, see "
|
|
|
|
+ "https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies"
|
|
|
|
+ ),
|
|
|
|
+ DependencyWarning,
|
|
|
|
+ )
|
|
|
|
+ raise
|
|
|
|
+
|
|
|
|
+-from socket import error as SocketError, timeout as SocketTimeout
|
|
|
|
++from socket import error as SocketError
|
|
|
|
++from socket import timeout as SocketTimeout
|
|
|
|
+
|
|
|
|
+ from ..connection import HTTPConnection, HTTPSConnection
|
|
|
|
+ from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool
|
|
|
|
+ from ..exceptions import ConnectTimeoutError, NewConnectionError
|
|
|
|
+ from ..poolmanager import PoolManager
|
|
|
|
+ from ..util.url import parse_url
|
|
|
|
+
|
|
|
|
+ try:
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/exceptions.py b/third_party/python/urllib3/urllib3/exceptions.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/exceptions.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/exceptions.py
|
|
|
|
+@@ -1,68 +1,74 @@
|
|
|
|
+ from __future__ import absolute_import
|
|
|
|
++
|
|
|
|
+ from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead
|
|
|
|
+
|
|
|
|
+ # Base Exceptions
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class HTTPError(Exception):
|
|
|
|
+- "Base exception used by this module."
|
|
|
|
++ """Base exception used by this module."""
|
|
|
|
++
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class HTTPWarning(Warning):
|
|
|
|
+- "Base warning used by this module."
|
|
|
|
++ """Base warning used by this module."""
|
|
|
|
++
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class PoolError(HTTPError):
|
|
|
|
+- "Base exception for errors caused within a pool."
|
|
|
|
++ """Base exception for errors caused within a pool."""
|
|
|
|
+
|
|
|
|
+ def __init__(self, pool, message):
|
|
|
|
+ self.pool = pool
|
|
|
|
+ HTTPError.__init__(self, "%s: %s" % (pool, message))
|
|
|
|
+
|
|
|
|
+ def __reduce__(self):
|
|
|
|
+ # For pickling purposes.
|
|
|
|
+ return self.__class__, (None, None)
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class RequestError(PoolError):
|
|
|
|
+- "Base exception for PoolErrors that have associated URLs."
|
|
|
|
++ """Base exception for PoolErrors that have associated URLs."""
|
|
|
|
+
|
|
|
|
+ def __init__(self, pool, url, message):
|
|
|
|
+ self.url = url
|
|
|
|
+ PoolError.__init__(self, pool, message)
|
|
|
|
+
|
|
|
|
+ def __reduce__(self):
|
|
|
|
+ # For pickling purposes.
|
|
|
|
+ return self.__class__, (None, self.url, None)
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class SSLError(HTTPError):
|
|
|
|
+- "Raised when SSL certificate fails in an HTTPS connection."
|
|
|
|
++ """Raised when SSL certificate fails in an HTTPS connection."""
|
|
|
|
++
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class ProxyError(HTTPError):
|
|
|
|
+- "Raised when the connection to a proxy fails."
|
|
|
|
++ """Raised when the connection to a proxy fails."""
|
|
|
|
+
|
|
|
|
+ def __init__(self, message, error, *args):
|
|
|
|
+ super(ProxyError, self).__init__(message, error, *args)
|
|
|
|
+ self.original_error = error
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class DecodeError(HTTPError):
|
|
|
|
+- "Raised when automatic decoding based on Content-Type fails."
|
|
|
|
++ """Raised when automatic decoding based on Content-Type fails."""
|
|
|
|
++
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class ProtocolError(HTTPError):
|
|
|
|
+- "Raised when something unexpected happens mid-request/response."
|
|
|
|
++ """Raised when something unexpected happens mid-request/response."""
|
|
|
|
++
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ #: Renamed to ProtocolError but aliased for backwards compatibility.
|
|
|
|
+ ConnectionError = ProtocolError
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ # Leaf Exceptions
|
|
|
|
+@@ -82,191 +88,226 @@ class MaxRetryError(RequestError):
|
|
|
|
+ self.reason = reason
|
|
|
|
+
|
|
|
|
+ message = "Max retries exceeded with url: %s (Caused by %r)" % (url, reason)
|
|
|
|
+
|
|
|
|
+ RequestError.__init__(self, pool, url, message)
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class HostChangedError(RequestError):
|
|
|
|
+- "Raised when an existing pool gets a request for a foreign host."
|
|
|
|
++ """Raised when an existing pool gets a request for a foreign host."""
|
|
|
|
+
|
|
|
|
+ def __init__(self, pool, url, retries=3):
|
|
|
|
+ message = "Tried to open a foreign host with url: %s" % url
|
|
|
|
+ RequestError.__init__(self, pool, url, message)
|
|
|
|
+ self.retries = retries
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class TimeoutStateError(HTTPError):
|
|
|
|
+- """ Raised when passing an invalid state to a timeout """
|
|
|
|
++ """Raised when passing an invalid state to a timeout"""
|
|
|
|
+
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class TimeoutError(HTTPError):
|
|
|
|
+- """ Raised when a socket timeout error occurs.
|
|
|
|
++ """Raised when a socket timeout error occurs.
|
|
|
|
+
|
|
|
|
+ Catching this error will catch both :exc:`ReadTimeoutErrors
|
|
|
|
+ <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
|
|
|
|
+ """
|
|
|
|
+
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class ReadTimeoutError(TimeoutError, RequestError):
|
|
|
|
+- "Raised when a socket timeout occurs while receiving data from a server"
|
|
|
|
++ """Raised when a socket timeout occurs while receiving data from a server"""
|
|
|
|
++
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ # This timeout error does not have a URL attached and needs to inherit from the
|
|
|
|
+ # base HTTPError
|
|
|
|
+ class ConnectTimeoutError(TimeoutError):
|
|
|
|
+- "Raised when a socket timeout occurs while connecting to a server"
|
|
|
|
++ """Raised when a socket timeout occurs while connecting to a server"""
|
|
|
|
++
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class NewConnectionError(ConnectTimeoutError, PoolError):
|
|
|
|
+- "Raised when we fail to establish a new connection. Usually ECONNREFUSED."
|
|
|
|
++ """Raised when we fail to establish a new connection. Usually ECONNREFUSED."""
|
|
|
|
++
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class EmptyPoolError(PoolError):
|
|
|
|
+- "Raised when a pool runs out of connections and no more are allowed."
|
|
|
|
++ """Raised when a pool runs out of connections and no more are allowed."""
|
|
|
|
++
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class ClosedPoolError(PoolError):
|
|
|
|
+- "Raised when a request enters a pool after the pool has been closed."
|
|
|
|
++ """Raised when a request enters a pool after the pool has been closed."""
|
|
|
|
++
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class LocationValueError(ValueError, HTTPError):
|
|
|
|
+- "Raised when there is something wrong with a given URL input."
|
|
|
|
++ """Raised when there is something wrong with a given URL input."""
|
|
|
|
++
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class LocationParseError(LocationValueError):
|
|
|
|
+- "Raised when get_host or similar fails to parse the URL input."
|
|
|
|
++ """Raised when get_host or similar fails to parse the URL input."""
|
|
|
|
+
|
|
|
|
+ def __init__(self, location):
|
|
|
|
+ message = "Failed to parse: %s" % location
|
|
|
|
+ HTTPError.__init__(self, message)
|
|
|
|
+
|
|
|
|
+ self.location = location
|
|
|
|
+
|
|
|
|
+
|
|
|
|
++class URLSchemeUnknown(LocationValueError):
|
|
|
|
++ """Raised when a URL input has an unsupported scheme."""
|
|
|
|
++
|
|
|
|
++ def __init__(self, scheme):
|
|
|
|
++ message = "Not supported URL scheme %s" % scheme
|
|
|
|
++ super(URLSchemeUnknown, self).__init__(message)
|
|
|
|
++
|
|
|
|
++ self.scheme = scheme
|
|
|
|
++
|
|
|
|
++
|
|
|
|
+ class ResponseError(HTTPError):
|
|
|
|
+- "Used as a container for an error reason supplied in a MaxRetryError."
|
|
|
|
++ """Used as a container for an error reason supplied in a MaxRetryError."""
|
|
|
|
++
|
|
|
|
+ GENERIC_ERROR = "too many error responses"
|
|
|
|
+ SPECIFIC_ERROR = "too many {status_code} error responses"
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class SecurityWarning(HTTPWarning):
|
|
|
|
+- "Warned when performing security reducing actions"
|
|
|
|
++ """Warned when performing security reducing actions"""
|
|
|
|
++
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class SubjectAltNameWarning(SecurityWarning):
|
|
|
|
+- "Warned when connecting to a host with a certificate missing a SAN."
|
|
|
|
++ """Warned when connecting to a host with a certificate missing a SAN."""
|
|
|
|
++
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class InsecureRequestWarning(SecurityWarning):
|
|
|
|
+- "Warned when making an unverified HTTPS request."
|
|
|
|
++ """Warned when making an unverified HTTPS request."""
|
|
|
|
++
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class SystemTimeWarning(SecurityWarning):
|
|
|
|
+- "Warned when system time is suspected to be wrong"
|
|
|
|
++ """Warned when system time is suspected to be wrong"""
|
|
|
|
++
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class InsecurePlatformWarning(SecurityWarning):
|
|
|
|
+- "Warned when certain SSL configuration is not available on a platform."
|
|
|
|
++ """Warned when certain TLS/SSL configuration is not available on a platform."""
|
|
|
|
++
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class SNIMissingWarning(HTTPWarning):
|
|
|
|
+- "Warned when making a HTTPS request without SNI available."
|
|
|
|
++ """Warned when making a HTTPS request without SNI available."""
|
|
|
|
++
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class DependencyWarning(HTTPWarning):
|
|
|
|
+ """
|
|
|
|
+ Warned when an attempt is made to import a module with missing optional
|
|
|
|
+ dependencies.
|
|
|
|
+ """
|
|
|
|
+
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+-class InvalidProxyConfigurationWarning(HTTPWarning):
|
|
|
|
+- """
|
|
|
|
+- Warned when using an HTTPS proxy and an HTTPS URL. Currently
|
|
|
|
+- urllib3 doesn't support HTTPS proxies and the proxy will be
|
|
|
|
+- contacted via HTTP instead. This warning can be fixed by
|
|
|
|
+- changing your HTTPS proxy URL into an HTTP proxy URL.
|
|
|
|
++class ResponseNotChunked(ProtocolError, ValueError):
|
|
|
|
++ """Response needs to be chunked in order to read it as chunks."""
|
|
|
|
+
|
|
|
|
+- If you encounter this warning read this:
|
|
|
|
+- https://github.com/urllib3/urllib3/issues/1850
|
|
|
|
+- """
|
|
|
|
+-
|
|
|
|
+- pass
|
|
|
|
+-
|
|
|
|
+-
|
|
|
|
+-class ResponseNotChunked(ProtocolError, ValueError):
|
|
|
|
+- "Response needs to be chunked in order to read it as chunks."
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class BodyNotHttplibCompatible(HTTPError):
|
|
|
|
+ """
|
|
|
|
+- Body should be httplib.HTTPResponse like (have an fp attribute which
|
|
|
|
+- returns raw chunks) for read_chunked().
|
|
|
|
++ Body should be :class:`http.client.HTTPResponse` like
|
|
|
|
++ (have an fp attribute which returns raw chunks) for read_chunked().
|
|
|
|
+ """
|
|
|
|
+
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class IncompleteRead(HTTPError, httplib_IncompleteRead):
|
|
|
|
+ """
|
|
|
|
+ Response length doesn't match expected Content-Length
|
|
|
|
+
|
|
|
|
+- Subclass of http_client.IncompleteRead to allow int value
|
|
|
|
+- for `partial` to avoid creating large objects on streamed
|
|
|
|
+- reads.
|
|
|
|
++ Subclass of :class:`http.client.IncompleteRead` to allow int value
|
|
|
|
++ for ``partial`` to avoid creating large objects on streamed reads.
|
|
|
|
+ """
|
|
|
|
+
|
|
|
|
+ def __init__(self, partial, expected):
|
|
|
|
+ super(IncompleteRead, self).__init__(partial, expected)
|
|
|
|
+
|
|
|
|
+ def __repr__(self):
|
|
|
|
+ return "IncompleteRead(%i bytes read, %i more expected)" % (
|
|
|
|
+ self.partial,
|
|
|
|
+ self.expected,
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
+
|
|
|
|
++class InvalidChunkLength(HTTPError, httplib_IncompleteRead):
|
|
|
|
++ """Invalid chunk length in a chunked response."""
|
|
|
|
++
|
|
|
|
++ def __init__(self, response, length):
|
|
|
|
++ super(InvalidChunkLength, self).__init__(
|
|
|
|
++ response.tell(), response.length_remaining
|
|
|
|
++ )
|
|
|
|
++ self.response = response
|
|
|
|
++ self.length = length
|
|
|
|
++
|
|
|
|
++ def __repr__(self):
|
|
|
|
++ return "InvalidChunkLength(got length %r, %i bytes read)" % (
|
|
|
|
++ self.length,
|
|
|
|
++ self.partial,
|
|
|
|
++ )
|
|
|
|
++
|
|
|
|
++
|
|
|
|
+ class InvalidHeader(HTTPError):
|
|
|
|
+- "The header provided was somehow invalid."
|
|
|
|
++ """The header provided was somehow invalid."""
|
|
|
|
++
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+-class ProxySchemeUnknown(AssertionError, ValueError):
|
|
|
|
+- "ProxyManager does not support the supplied scheme"
|
|
|
|
++class ProxySchemeUnknown(AssertionError, URLSchemeUnknown):
|
|
|
|
++ """ProxyManager does not support the supplied scheme"""
|
|
|
|
++
|
|
|
|
+ # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
|
|
|
|
+
|
|
|
|
+ def __init__(self, scheme):
|
|
|
|
+ message = "Not supported proxy scheme %s" % scheme
|
|
|
|
+ super(ProxySchemeUnknown, self).__init__(message)
|
|
|
|
+
|
|
|
|
+
|
|
|
|
++class ProxySchemeUnsupported(ValueError):
|
|
|
|
++ """Fetching HTTPS resources through HTTPS proxies is unsupported"""
|
|
|
|
++
|
|
|
|
++ pass
|
|
|
|
++
|
|
|
|
++
|
|
|
|
+ class HeaderParsingError(HTTPError):
|
|
|
|
+- "Raised by assert_header_parsing, but we convert it to a log.warning statement."
|
|
|
|
++ """Raised by assert_header_parsing, but we convert it to a log.warning statement."""
|
|
|
|
+
|
|
|
|
+ def __init__(self, defects, unparsed_data):
|
|
|
|
+ message = "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data)
|
|
|
|
+ super(HeaderParsingError, self).__init__(message)
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class UnrewindableBodyError(HTTPError):
|
|
|
|
+- "urllib3 encountered an error when trying to rewind a body"
|
|
|
|
++ """urllib3 encountered an error when trying to rewind a body"""
|
|
|
|
++
|
|
|
|
+ pass
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/fields.py b/third_party/python/urllib3/urllib3/fields.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/fields.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/fields.py
|
|
|
|
+@@ -1,9 +1,10 @@
|
|
|
|
+ from __future__ import absolute_import
|
|
|
|
++
|
|
|
|
+ import email.utils
|
|
|
|
+ import mimetypes
|
|
|
|
+ import re
|
|
|
|
+
|
|
|
|
+ from .packages import six
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ def guess_content_type(filename, default="application/octet-stream"):
|
|
|
|
+@@ -21,17 +22,18 @@ def guess_content_type(filename, default
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ def format_header_param_rfc2231(name, value):
|
|
|
|
+ """
|
|
|
|
+ Helper function to format and quote a single header parameter using the
|
|
|
|
+ strategy defined in RFC 2231.
|
|
|
|
+
|
|
|
|
+ Particularly useful for header parameters which might contain
|
|
|
|
+- non-ASCII values, like file names. This follows RFC 2388 Section 4.4.
|
|
|
|
++ non-ASCII values, like file names. This follows
|
|
|
|
++ `RFC 2388 Section 4.4 <https://tools.ietf.org/html/rfc2388#section-4.4>`_.
|
|
|
|
+
|
|
|
|
+ :param name:
|
|
|
|
+ The name of the parameter, a string expected to be ASCII only.
|
|
|
|
+ :param value:
|
|
|
|
+ The value of the parameter, provided as ``bytes`` or `str``.
|
|
|
|
+ :ret:
|
|
|
|
+ An RFC-2231-formatted unicode string.
|
|
|
|
+ """
|
|
|
|
+@@ -60,17 +62,16 @@ def format_header_param_rfc2231(name, va
|
|
|
|
+
|
|
|
|
+ return value
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ _HTML5_REPLACEMENTS = {
|
|
|
|
+ u"\u0022": u"%22",
|
|
|
|
+ # Replace "\" with "\\".
|
|
|
|
+ u"\u005C": u"\u005C\u005C",
|
|
|
|
+- u"\u005C": u"\u005C\u005C",
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ # All control characters from 0x00 to 0x1F *except* 0x1B.
|
|
|
|
+ _HTML5_REPLACEMENTS.update(
|
|
|
|
+ {
|
|
|
|
+ six.unichr(cc): u"%{:02X}".format(cc)
|
|
|
|
+ for cc in range(0x00, 0x1F + 1)
|
|
|
|
+ if cc not in (0x1B,)
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/filepost.py b/third_party/python/urllib3/urllib3/filepost.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/filepost.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/filepost.py
|
|
|
|
+@@ -1,18 +1,18 @@
|
|
|
|
+ from __future__ import absolute_import
|
|
|
|
++
|
|
|
|
+ import binascii
|
|
|
|
+ import codecs
|
|
|
|
+ import os
|
|
|
|
+-
|
|
|
|
+ from io import BytesIO
|
|
|
|
+
|
|
|
|
++from .fields import RequestField
|
|
|
|
+ from .packages import six
|
|
|
|
+ from .packages.six import b
|
|
|
|
+-from .fields import RequestField
|
|
|
|
+
|
|
|
|
+ writer = codecs.lookup("utf-8")[3]
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ def choose_boundary():
|
|
|
|
+ """
|
|
|
|
+ Our embarrassingly-simple replacement for mimetools.choose_boundary.
|
|
|
|
+ """
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/packages/backports/makefile.py b/third_party/python/urllib3/urllib3/packages/backports/makefile.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/packages/backports/makefile.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/packages/backports/makefile.py
|
|
|
|
+@@ -2,17 +2,16 @@
|
|
|
|
+ """
|
|
|
|
+ backports.makefile
|
|
|
|
+ ~~~~~~~~~~~~~~~~~~
|
|
|
|
+
|
|
|
|
+ Backports the Python 3 ``socket.makefile`` method for use with anything that
|
|
|
|
+ wants to create a "fake" socket object.
|
|
|
|
+ """
|
|
|
|
+ import io
|
|
|
|
+-
|
|
|
|
+ from socket import SocketIO
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ def backport_makefile(
|
|
|
|
+ self, mode="r", buffering=None, encoding=None, errors=None, newline=None
|
|
|
|
+ ):
|
|
|
|
+ """
|
|
|
|
+ Backport of ``socket.makefile`` from Python 3.5.
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/packages/ssl_match_hostname/__init__.py b/third_party/python/urllib3/urllib3/packages/ssl_match_hostname/__init__.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/packages/ssl_match_hostname/__init__.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/packages/ssl_match_hostname/__init__.py
|
|
|
|
+@@ -5,15 +5,18 @@ try:
|
|
|
|
+ # import the match_hostname function if it's at least that good.
|
|
|
|
+ if sys.version_info < (3, 5):
|
|
|
|
+ raise ImportError("Fallback to vendored code")
|
|
|
|
+
|
|
|
|
+ from ssl import CertificateError, match_hostname
|
|
|
|
+ except ImportError:
|
|
|
|
+ try:
|
|
|
|
+ # Backport of the function from a pypi module
|
|
|
|
+- from backports.ssl_match_hostname import CertificateError, match_hostname
|
|
|
|
++ from backports.ssl_match_hostname import ( # type: ignore
|
|
|
|
++ CertificateError,
|
|
|
|
++ match_hostname,
|
|
|
|
++ )
|
|
|
|
+ except ImportError:
|
|
|
|
+ # Our vendored copy
|
|
|
|
+- from ._implementation import CertificateError, match_hostname
|
|
|
|
++ from ._implementation import CertificateError, match_hostname # type: ignore
|
|
|
|
+
|
|
|
|
+ # Not needed, but documenting what we provide.
|
|
|
|
+ __all__ = ("CertificateError", "match_hostname")
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/poolmanager.py b/third_party/python/urllib3/urllib3/poolmanager.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/poolmanager.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/poolmanager.py
|
|
|
|
+@@ -1,29 +1,29 @@
|
|
|
|
+ from __future__ import absolute_import
|
|
|
|
++
|
|
|
|
+ import collections
|
|
|
|
+ import functools
|
|
|
|
+ import logging
|
|
|
|
+-import warnings
|
|
|
|
+
|
|
|
|
+ from ._collections import RecentlyUsedContainer
|
|
|
|
+-from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
|
|
|
|
+-from .connectionpool import port_by_scheme
|
|
|
|
++from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme
|
|
|
|
+ from .exceptions import (
|
|
|
|
+ LocationValueError,
|
|
|
|
+ MaxRetryError,
|
|
|
|
+ ProxySchemeUnknown,
|
|
|
|
+- InvalidProxyConfigurationWarning,
|
|
|
|
++ ProxySchemeUnsupported,
|
|
|
|
++ URLSchemeUnknown,
|
|
|
|
+ )
|
|
|
|
+ from .packages import six
|
|
|
|
+ from .packages.six.moves.urllib.parse import urljoin
|
|
|
|
+ from .request import RequestMethods
|
|
|
|
+-from .util.url import parse_url
|
|
|
|
++from .util.proxy import connection_requires_http_tunnel
|
|
|
|
+ from .util.retry import Retry
|
|
|
|
+-
|
|
|
|
++from .util.url import parse_url
|
|
|
|
+
|
|
|
|
+ __all__ = ["PoolManager", "ProxyManager", "proxy_from_url"]
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ log = logging.getLogger(__name__)
|
|
|
|
+
|
|
|
|
+ SSL_KEYWORDS = (
|
|
|
|
+ "key_file",
|
|
|
|
+@@ -54,27 +54,31 @@ SSL_KEYWORDS = (
|
|
|
|
+ "key_ca_certs", # str
|
|
|
|
+ "key_ssl_version", # str
|
|
|
|
+ "key_ca_cert_dir", # str
|
|
|
|
+ "key_ssl_context", # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext
|
|
|
|
+ "key_maxsize", # int
|
|
|
|
+ "key_headers", # dict
|
|
|
|
+ "key__proxy", # parsed proxy url
|
|
|
|
+ "key__proxy_headers", # dict
|
|
|
|
++ "key__proxy_config", # class
|
|
|
|
+ "key_socket_options", # list of (level (int), optname (int), value (int or str)) tuples
|
|
|
|
+ "key__socks_options", # dict
|
|
|
|
+ "key_assert_hostname", # bool or string
|
|
|
|
+ "key_assert_fingerprint", # str
|
|
|
|
+ "key_server_hostname", # str
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
+ #: The namedtuple class used to construct keys for the connection pool.
|
|
|
|
+ #: All custom key schemes should include the fields in this key at a minimum.
|
|
|
|
+ PoolKey = collections.namedtuple("PoolKey", _key_fields)
|
|
|
|
+
|
|
|
|
++_proxy_config_fields = ("ssl_context", "use_forwarding_for_https")
|
|
|
|
++ProxyConfig = collections.namedtuple("ProxyConfig", _proxy_config_fields)
|
|
|
|
++
|
|
|
|
+
|
|
|
|
+ def _default_key_normalizer(key_class, request_context):
|
|
|
|
+ """
|
|
|
|
+ Create a pool key out of a request context dictionary.
|
|
|
|
+
|
|
|
|
+ According to RFC 3986, both the scheme and host are case-insensitive.
|
|
|
|
+ Therefore, this function normalizes both before constructing the pool
|
|
|
|
+ key for an HTTPS request. If you wish to change this behaviour, provide
|
|
|
|
+@@ -156,16 +160,17 @@ class PoolManager(RequestMethods):
|
|
|
|
+ >>> r = manager.request('GET', 'http://google.com/mail')
|
|
|
|
+ >>> r = manager.request('GET', 'http://yahoo.com/')
|
|
|
|
+ >>> len(manager.pools)
|
|
|
|
+ 2
|
|
|
|
+
|
|
|
|
+ """
|
|
|
|
+
|
|
|
|
+ proxy = None
|
|
|
|
++ proxy_config = None
|
|
|
|
+
|
|
|
|
+ def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
|
|
|
|
+ RequestMethods.__init__(self, headers)
|
|
|
|
+ self.connection_pool_kw = connection_pool_kw
|
|
|
|
+ self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close())
|
|
|
|
+
|
|
|
|
+ # Locally set the pool classes and keys so other PoolManagers can
|
|
|
|
+ # override them.
|
|
|
|
+@@ -177,17 +182,17 @@ class PoolManager(RequestMethods):
|
|
|
|
+
|
|
|
|
+ def __exit__(self, exc_type, exc_val, exc_tb):
|
|
|
|
+ self.clear()
|
|
|
|
+ # Return False to re-raise any potential exceptions
|
|
|
|
+ return False
|
|
|
|
+
|
|
|
|
+ def _new_pool(self, scheme, host, port, request_context=None):
|
|
|
|
+ """
|
|
|
|
+- Create a new :class:`ConnectionPool` based on host, port, scheme, and
|
|
|
|
++ Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and
|
|
|
|
+ any additional pool keyword arguments.
|
|
|
|
+
|
|
|
|
+ If ``request_context`` is provided, it is provided as keyword arguments
|
|
|
|
+ to the pool class used. This method is used to actually create the
|
|
|
|
+ connection pools handed out by :meth:`connection_from_url` and
|
|
|
|
+ companion methods. It is intended to be overridden for customization.
|
|
|
|
+ """
|
|
|
|
+ pool_cls = self.pool_classes_by_scheme[scheme]
|
|
|
|
+@@ -213,17 +218,17 @@ class PoolManager(RequestMethods):
|
|
|
|
+
|
|
|
|
+ This will not affect in-flight connections, but they will not be
|
|
|
|
+ re-used after completion.
|
|
|
|
+ """
|
|
|
|
+ self.pools.clear()
|
|
|
|
+
|
|
|
|
+ def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
|
|
|
|
+ """
|
|
|
|
+- Get a :class:`ConnectionPool` based on the host, port, and scheme.
|
|
|
|
++ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme.
|
|
|
|
+
|
|
|
|
+ If ``port`` isn't given, it will be derived from the ``scheme`` using
|
|
|
|
+ ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
|
|
|
|
+ provided, it is merged with the instance's ``connection_pool_kw``
|
|
|
|
+ variable and used to create the new connection pool, if one is
|
|
|
|
+ needed.
|
|
|
|
+ """
|
|
|
|
+
|
|
|
|
+@@ -236,30 +241,32 @@ class PoolManager(RequestMethods):
|
|
|
|
+ port = port_by_scheme.get(request_context["scheme"].lower(), 80)
|
|
|
|
+ request_context["port"] = port
|
|
|
|
+ request_context["host"] = host
|
|
|
|
+
|
|
|
|
+ return self.connection_from_context(request_context)
|
|
|
|
+
|
|
|
|
+ def connection_from_context(self, request_context):
|
|
|
|
+ """
|
|
|
|
+- Get a :class:`ConnectionPool` based on the request context.
|
|
|
|
++ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context.
|
|
|
|
+
|
|
|
|
+ ``request_context`` must at least contain the ``scheme`` key and its
|
|
|
|
+ value must be a key in ``key_fn_by_scheme`` instance variable.
|
|
|
|
+ """
|
|
|
|
+ scheme = request_context["scheme"].lower()
|
|
|
|
+- pool_key_constructor = self.key_fn_by_scheme[scheme]
|
|
|
|
++ pool_key_constructor = self.key_fn_by_scheme.get(scheme)
|
|
|
|
++ if not pool_key_constructor:
|
|
|
|
++ raise URLSchemeUnknown(scheme)
|
|
|
|
+ pool_key = pool_key_constructor(request_context)
|
|
|
|
+
|
|
|
|
+ return self.connection_from_pool_key(pool_key, request_context=request_context)
|
|
|
|
+
|
|
|
|
+ def connection_from_pool_key(self, pool_key, request_context=None):
|
|
|
|
+ """
|
|
|
|
+- Get a :class:`ConnectionPool` based on the provided pool key.
|
|
|
|
++ Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key.
|
|
|
|
+
|
|
|
|
+ ``pool_key`` should be a namedtuple that only contains immutable
|
|
|
|
+ objects. At a minimum it must have the ``scheme``, ``host``, and
|
|
|
|
+ ``port`` fields.
|
|
|
|
+ """
|
|
|
|
+ with self.pools.lock:
|
|
|
|
+ # If the scheme, host, or port doesn't match existing open
|
|
|
|
+ # connections, open a new ConnectionPool.
|
|
|
|
+@@ -307,35 +314,67 @@ class PoolManager(RequestMethods):
|
|
|
|
+ try:
|
|
|
|
+ del base_pool_kwargs[key]
|
|
|
|
+ except KeyError:
|
|
|
|
+ pass
|
|
|
|
+ else:
|
|
|
|
+ base_pool_kwargs[key] = value
|
|
|
|
+ return base_pool_kwargs
|
|
|
|
+
|
|
|
|
++ def _proxy_requires_url_absolute_form(self, parsed_url):
|
|
|
|
++ """
|
|
|
|
++ Indicates if the proxy requires the complete destination URL in the
|
|
|
|
++ request. Normally this is only needed when not using an HTTP CONNECT
|
|
|
|
++ tunnel.
|
|
|
|
++ """
|
|
|
|
++ if self.proxy is None:
|
|
|
|
++ return False
|
|
|
|
++
|
|
|
|
++ return not connection_requires_http_tunnel(
|
|
|
|
++ self.proxy, self.proxy_config, parsed_url.scheme
|
|
|
|
++ )
|
|
|
|
++
|
|
|
|
++ def _validate_proxy_scheme_url_selection(self, url_scheme):
|
|
|
|
++ """
|
|
|
|
++ Validates that were not attempting to do TLS in TLS connections on
|
|
|
|
++ Python2 or with unsupported SSL implementations.
|
|
|
|
++ """
|
|
|
|
++ if self.proxy is None or url_scheme != "https":
|
|
|
|
++ return
|
|
|
|
++
|
|
|
|
++ if self.proxy.scheme != "https":
|
|
|
|
++ return
|
|
|
|
++
|
|
|
|
++ if six.PY2 and not self.proxy_config.use_forwarding_for_https:
|
|
|
|
++ raise ProxySchemeUnsupported(
|
|
|
|
++ "Contacting HTTPS destinations through HTTPS proxies "
|
|
|
|
++ "'via CONNECT tunnels' is not supported in Python 2"
|
|
|
|
++ )
|
|
|
|
++
|
|
|
|
+ def urlopen(self, method, url, redirect=True, **kw):
|
|
|
|
+ """
|
|
|
|
+- Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
|
|
|
|
++ Same as :meth:`urllib3.HTTPConnectionPool.urlopen`
|
|
|
|
+ with custom cross-host redirect logic and only sends the request-uri
|
|
|
|
+ portion of the ``url``.
|
|
|
|
+
|
|
|
|
+ The given ``url`` parameter must be absolute, such that an appropriate
|
|
|
|
+ :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
|
|
|
|
+ """
|
|
|
|
+ u = parse_url(url)
|
|
|
|
++ self._validate_proxy_scheme_url_selection(u.scheme)
|
|
|
|
++
|
|
|
|
+ conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
|
|
|
|
+
|
|
|
|
+ kw["assert_same_host"] = False
|
|
|
|
+ kw["redirect"] = False
|
|
|
|
+
|
|
|
|
+ if "headers" not in kw:
|
|
|
|
+ kw["headers"] = self.headers.copy()
|
|
|
|
+
|
|
|
|
+- if self.proxy is not None and u.scheme == "http":
|
|
|
|
++ if self._proxy_requires_url_absolute_form(u):
|
|
|
|
+ response = conn.urlopen(method, url, **kw)
|
|
|
|
+ else:
|
|
|
|
+ response = conn.urlopen(method, u.request_uri, **kw)
|
|
|
|
+
|
|
|
|
+ redirect_location = redirect and response.get_redirect_location()
|
|
|
|
+ if not redirect_location:
|
|
|
|
+ return response
|
|
|
|
+
|
|
|
|
+@@ -387,16 +426,29 @@ class ProxyManager(PoolManager):
|
|
|
|
+ The URL of the proxy to be used.
|
|
|
|
+
|
|
|
|
+ :param proxy_headers:
|
|
|
|
+ A dictionary containing headers that will be sent to the proxy. In case
|
|
|
|
+ of HTTP they are being sent with each request, while in the
|
|
|
|
+ HTTPS/CONNECT case they are sent only once. Could be used for proxy
|
|
|
|
+ authentication.
|
|
|
|
+
|
|
|
|
++ :param proxy_ssl_context:
|
|
|
|
++ The proxy SSL context is used to establish the TLS connection to the
|
|
|
|
++ proxy when using HTTPS proxies.
|
|
|
|
++
|
|
|
|
++ :param use_forwarding_for_https:
|
|
|
|
++ (Defaults to False) If set to True will forward requests to the HTTPS
|
|
|
|
++ proxy to be made on behalf of the client instead of creating a TLS
|
|
|
|
++ tunnel via the CONNECT method. **Enabling this flag means that request
|
|
|
|
++ and response headers and content will be visible from the HTTPS proxy**
|
|
|
|
++ whereas tunneling keeps request and response headers and content
|
|
|
|
++ private. IP address, target hostname, SNI, and port are always visible
|
|
|
|
++ to an HTTPS proxy even when this flag is disabled.
|
|
|
|
++
|
|
|
|
+ Example:
|
|
|
|
+ >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
|
|
|
|
+ >>> r1 = proxy.request('GET', 'http://google.com/')
|
|
|
|
+ >>> r2 = proxy.request('GET', 'http://httpbin.org/')
|
|
|
|
+ >>> len(proxy.pools)
|
|
|
|
+ 1
|
|
|
|
+ >>> r3 = proxy.request('GET', 'https://httpbin.org/')
|
|
|
|
+ >>> r4 = proxy.request('GET', 'https://twitter.com/')
|
|
|
|
+@@ -406,38 +458,44 @@ class ProxyManager(PoolManager):
|
|
|
|
+ """
|
|
|
|
+
|
|
|
|
+ def __init__(
|
|
|
|
+ self,
|
|
|
|
+ proxy_url,
|
|
|
|
+ num_pools=10,
|
|
|
|
+ headers=None,
|
|
|
|
+ proxy_headers=None,
|
|
|
|
++ proxy_ssl_context=None,
|
|
|
|
++ use_forwarding_for_https=False,
|
|
|
|
+ **connection_pool_kw
|
|
|
|
+ ):
|
|
|
|
+
|
|
|
|
+ if isinstance(proxy_url, HTTPConnectionPool):
|
|
|
|
+ proxy_url = "%s://%s:%i" % (
|
|
|
|
+ proxy_url.scheme,
|
|
|
|
+ proxy_url.host,
|
|
|
|
+ proxy_url.port,
|
|
|
|
+ )
|
|
|
|
+ proxy = parse_url(proxy_url)
|
|
|
|
++
|
|
|
|
++ if proxy.scheme not in ("http", "https"):
|
|
|
|
++ raise ProxySchemeUnknown(proxy.scheme)
|
|
|
|
++
|
|
|
|
+ if not proxy.port:
|
|
|
|
+ port = port_by_scheme.get(proxy.scheme, 80)
|
|
|
|
+ proxy = proxy._replace(port=port)
|
|
|
|
+
|
|
|
|
+- if proxy.scheme not in ("http", "https"):
|
|
|
|
+- raise ProxySchemeUnknown(proxy.scheme)
|
|
|
|
+-
|
|
|
|
+ self.proxy = proxy
|
|
|
|
+ self.proxy_headers = proxy_headers or {}
|
|
|
|
++ self.proxy_ssl_context = proxy_ssl_context
|
|
|
|
++ self.proxy_config = ProxyConfig(proxy_ssl_context, use_forwarding_for_https)
|
|
|
|
+
|
|
|
|
+ connection_pool_kw["_proxy"] = self.proxy
|
|
|
|
+ connection_pool_kw["_proxy_headers"] = self.proxy_headers
|
|
|
|
++ connection_pool_kw["_proxy_config"] = self.proxy_config
|
|
|
|
+
|
|
|
|
+ super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw)
|
|
|
|
+
|
|
|
|
+ def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
|
|
|
|
+ if scheme == "https":
|
|
|
|
+ return super(ProxyManager, self).connection_from_host(
|
|
|
|
+ host, port, scheme, pool_kwargs=pool_kwargs
|
|
|
|
+ )
|
|
|
|
+@@ -456,37 +514,23 @@ class ProxyManager(PoolManager):
|
|
|
|
+ netloc = parse_url(url).netloc
|
|
|
|
+ if netloc:
|
|
|
|
+ headers_["Host"] = netloc
|
|
|
|
+
|
|
|
|
+ if headers:
|
|
|
|
+ headers_.update(headers)
|
|
|
|
+ return headers_
|
|
|
|
+
|
|
|
|
+- def _validate_proxy_scheme_url_selection(self, url_scheme):
|
|
|
|
+- if url_scheme == "https" and self.proxy.scheme == "https":
|
|
|
|
+- warnings.warn(
|
|
|
|
+- "Your proxy configuration specified an HTTPS scheme for the proxy. "
|
|
|
|
+- "Are you sure you want to use HTTPS to contact the proxy? "
|
|
|
|
+- "This most likely indicates an error in your configuration. "
|
|
|
|
+- "Read this issue for more info: "
|
|
|
|
+- "https://github.com/urllib3/urllib3/issues/1850",
|
|
|
|
+- InvalidProxyConfigurationWarning,
|
|
|
|
+- stacklevel=3,
|
|
|
|
+- )
|
|
|
|
+-
|
|
|
|
+ def urlopen(self, method, url, redirect=True, **kw):
|
|
|
|
+ "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
|
|
|
|
+ u = parse_url(url)
|
|
|
|
+- self._validate_proxy_scheme_url_selection(u.scheme)
|
|
|
|
+-
|
|
|
|
+- if u.scheme == "http":
|
|
|
|
+- # For proxied HTTPS requests, httplib sets the necessary headers
|
|
|
|
+- # on the CONNECT to the proxy. For HTTP, we'll definitely
|
|
|
|
+- # need to set 'Host' at the very least.
|
|
|
|
++ if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme):
|
|
|
|
++ # For connections using HTTP CONNECT, httplib sets the necessary
|
|
|
|
++ # headers on the CONNECT to the proxy. If we're not using CONNECT,
|
|
|
|
++ # we'll definitely need to set 'Host' at the very least.
|
|
|
|
+ headers = kw.get("headers", self.headers)
|
|
|
|
+ kw["headers"] = self._set_proxy_headers(url, headers)
|
|
|
|
+
|
|
|
|
+ return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ def proxy_from_url(url, **kw):
|
|
|
|
+ return ProxyManager(proxy_url=url, **kw)
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/request.py b/third_party/python/urllib3/urllib3/request.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/request.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/request.py
|
|
|
|
+@@ -1,22 +1,21 @@
|
|
|
|
+ from __future__ import absolute_import
|
|
|
|
+
|
|
|
|
+ from .filepost import encode_multipart_formdata
|
|
|
|
+ from .packages.six.moves.urllib.parse import urlencode
|
|
|
|
+
|
|
|
|
+-
|
|
|
|
+ __all__ = ["RequestMethods"]
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class RequestMethods(object):
|
|
|
|
+ """
|
|
|
|
+ Convenience mixin for classes who implement a :meth:`urlopen` method, such
|
|
|
|
+- as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
|
|
|
|
+- :class:`~urllib3.poolmanager.PoolManager`.
|
|
|
|
++ as :class:`urllib3.HTTPConnectionPool` and
|
|
|
|
++ :class:`urllib3.PoolManager`.
|
|
|
|
+
|
|
|
|
+ Provides behavior for making common types of HTTP request methods and
|
|
|
|
+ decides which type of request field encoding to use.
|
|
|
|
+
|
|
|
|
+ Specifically,
|
|
|
|
+
|
|
|
|
+ :meth:`.request_encode_url` is for sending requests whose fields are
|
|
|
|
+ encoded in the URL (such as GET, HEAD, DELETE).
|
|
|
|
+@@ -106,19 +105,19 @@ class RequestMethods(object):
|
|
|
|
+ multipart_boundary=None,
|
|
|
|
+ **urlopen_kw
|
|
|
|
+ ):
|
|
|
|
+ """
|
|
|
|
+ Make a request using :meth:`urlopen` with the ``fields`` encoded in
|
|
|
|
+ the body. This is useful for request methods like POST, PUT, PATCH, etc.
|
|
|
|
+
|
|
|
|
+ When ``encode_multipart=True`` (default), then
|
|
|
|
+- :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode
|
|
|
|
++ :func:`urllib3.encode_multipart_formdata` is used to encode
|
|
|
|
+ the payload with the appropriate content type. Otherwise
|
|
|
|
+- :meth:`urllib.urlencode` is used with the
|
|
|
|
++ :func:`urllib.parse.urlencode` is used with the
|
|
|
|
+ 'application/x-www-form-urlencoded' content type.
|
|
|
|
+
|
|
|
|
+ Multipart encoding must be used when posting files, and it's reasonably
|
|
|
|
+ safe to use it in other times too. However, it may break request
|
|
|
|
+ signing, such as with OAuth.
|
|
|
|
+
|
|
|
|
+ Supports an optional ``fields`` parameter of key/value strings AND
|
|
|
|
+ key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/response.py b/third_party/python/urllib3/urllib3/response.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/response.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/response.py
|
|
|
|
+@@ -1,35 +1,37 @@
|
|
|
|
+ from __future__ import absolute_import
|
|
|
|
+-from contextlib import contextmanager
|
|
|
|
+-import zlib
|
|
|
|
++
|
|
|
|
+ import io
|
|
|
|
+ import logging
|
|
|
|
++import zlib
|
|
|
|
++from contextlib import contextmanager
|
|
|
|
++from socket import error as SocketError
|
|
|
|
+ from socket import timeout as SocketTimeout
|
|
|
|
+-from socket import error as SocketError
|
|
|
|
+
|
|
|
|
+ try:
|
|
|
|
+ import brotli
|
|
|
|
+ except ImportError:
|
|
|
|
+ brotli = None
|
|
|
|
+
|
|
|
|
+ from ._collections import HTTPHeaderDict
|
|
|
|
++from .connection import BaseSSLError, HTTPException
|
|
|
|
+ from .exceptions import (
|
|
|
|
+ BodyNotHttplibCompatible,
|
|
|
|
++ DecodeError,
|
|
|
|
++ HTTPError,
|
|
|
|
++ IncompleteRead,
|
|
|
|
++ InvalidChunkLength,
|
|
|
|
++ InvalidHeader,
|
|
|
|
+ ProtocolError,
|
|
|
|
+- DecodeError,
|
|
|
|
+ ReadTimeoutError,
|
|
|
|
+ ResponseNotChunked,
|
|
|
|
+- IncompleteRead,
|
|
|
|
+- InvalidHeader,
|
|
|
|
+- HTTPError,
|
|
|
|
++ SSLError,
|
|
|
|
+ )
|
|
|
|
+-from .packages.six import string_types as basestring, PY3
|
|
|
|
+-from .packages.six.moves import http_client as httplib
|
|
|
|
+-from .connection import HTTPException, BaseSSLError
|
|
|
|
++from .packages import six
|
|
|
|
+ from .util.response import is_fp_closed, is_response_to_head
|
|
|
|
+
|
|
|
|
+ log = logging.getLogger(__name__)
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class DeflateDecoder(object):
|
|
|
|
+ def __init__(self):
|
|
|
|
+ self._first_try = True
|
|
|
|
+@@ -102,21 +104,20 @@ class GzipDecoder(object):
|
|
|
|
+ if brotli is not None:
|
|
|
|
+
|
|
|
|
+ class BrotliDecoder(object):
|
|
|
|
+ # Supports both 'brotlipy' and 'Brotli' packages
|
|
|
|
+ # since they share an import name. The top branches
|
|
|
|
+ # are for 'brotlipy' and bottom branches for 'Brotli'
|
|
|
|
+ def __init__(self):
|
|
|
|
+ self._obj = brotli.Decompressor()
|
|
|
|
+-
|
|
|
|
+- def decompress(self, data):
|
|
|
|
+ if hasattr(self._obj, "decompress"):
|
|
|
|
+- return self._obj.decompress(data)
|
|
|
|
+- return self._obj.process(data)
|
|
|
|
++ self.decompress = self._obj.decompress
|
|
|
|
++ else:
|
|
|
|
++ self.decompress = self._obj.process
|
|
|
|
+
|
|
|
|
+ def flush(self):
|
|
|
|
+ if hasattr(self._obj, "flush"):
|
|
|
|
+ return self._obj.flush()
|
|
|
|
+ return b""
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class MultiDecoder(object):
|
|
|
|
+@@ -152,33 +153,33 @@ def _get_decoder(mode):
|
|
|
|
+
|
|
|
|
+ return DeflateDecoder()
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class HTTPResponse(io.IOBase):
|
|
|
|
+ """
|
|
|
|
+ HTTP Response container.
|
|
|
|
+
|
|
|
|
+- Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
|
|
|
|
++ Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is
|
|
|
|
+ loaded and decoded on-demand when the ``data`` property is accessed. This
|
|
|
|
+ class is also compatible with the Python standard library's :mod:`io`
|
|
|
|
+ module, and can hence be treated as a readable object in the context of that
|
|
|
|
+ framework.
|
|
|
|
+
|
|
|
|
+- Extra parameters for behaviour not present in httplib.HTTPResponse:
|
|
|
|
++ Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`:
|
|
|
|
+
|
|
|
|
+ :param preload_content:
|
|
|
|
+ If True, the response's body will be preloaded during construction.
|
|
|
|
+
|
|
|
|
+ :param decode_content:
|
|
|
|
+ If True, will attempt to decode the body based on the
|
|
|
|
+ 'content-encoding' header.
|
|
|
|
+
|
|
|
|
+ :param original_response:
|
|
|
|
+- When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
|
|
|
|
++ When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse`
|
|
|
|
+ object, it's convenient to include the original for debug purposes. It's
|
|
|
|
+ otherwise unused.
|
|
|
|
+
|
|
|
|
+ :param retries:
|
|
|
|
+ The retries contains the last :class:`~urllib3.util.retry.Retry` that
|
|
|
|
+ was used during the request.
|
|
|
|
+
|
|
|
|
+ :param enforce_content_length:
|
|
|
|
+@@ -228,17 +229,17 @@ class HTTPResponse(io.IOBase):
|
|
|
|
+ self._decoder = None
|
|
|
|
+ self._body = None
|
|
|
|
+ self._fp = None
|
|
|
|
+ self._original_response = original_response
|
|
|
|
+ self._fp_bytes_read = 0
|
|
|
|
+ self.msg = msg
|
|
|
|
+ self._request_url = request_url
|
|
|
|
+
|
|
|
|
+- if body and isinstance(body, (basestring, bytes)):
|
|
|
|
++ if body and isinstance(body, (six.string_types, bytes)):
|
|
|
|
+ self._body = body
|
|
|
|
+
|
|
|
|
+ self._pool = pool
|
|
|
|
+ self._connection = connection
|
|
|
|
+
|
|
|
|
+ if hasattr(body, "read"):
|
|
|
|
+ self._fp = body
|
|
|
|
+
|
|
|
|
+@@ -286,35 +287,35 @@ class HTTPResponse(io.IOBase):
|
|
|
|
+ """
|
|
|
|
+ try:
|
|
|
|
+ self.read()
|
|
|
|
+ except (HTTPError, SocketError, BaseSSLError, HTTPException):
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+ @property
|
|
|
|
+ def data(self):
|
|
|
|
+- # For backwords-compat with earlier urllib3 0.4 and earlier.
|
|
|
|
++ # For backwards-compat with earlier urllib3 0.4 and earlier.
|
|
|
|
+ if self._body:
|
|
|
|
+ return self._body
|
|
|
|
+
|
|
|
|
+ if self._fp:
|
|
|
|
+ return self.read(cache_content=True)
|
|
|
|
+
|
|
|
|
+ @property
|
|
|
|
+ def connection(self):
|
|
|
|
+ return self._connection
|
|
|
|
+
|
|
|
|
+ def isclosed(self):
|
|
|
|
+ return is_fp_closed(self._fp)
|
|
|
|
+
|
|
|
|
+ def tell(self):
|
|
|
|
+ """
|
|
|
|
+ Obtain the number of bytes pulled over the wire so far. May differ from
|
|
|
|
+- the amount of content returned by :meth:``HTTPResponse.read`` if bytes
|
|
|
|
+- are encoded on the wire (e.g, compressed).
|
|
|
|
++ the amount of content returned by :meth:``urllib3.response.HTTPResponse.read``
|
|
|
|
++ if bytes are encoded on the wire (e.g, compressed).
|
|
|
|
+ """
|
|
|
|
+ return self._fp_bytes_read
|
|
|
|
+
|
|
|
|
+ def _init_length(self, request_method):
|
|
|
|
+ """
|
|
|
|
+ Set initial length value for Response content if available.
|
|
|
|
+ """
|
|
|
|
+ length = self.headers.get("content-length")
|
|
|
|
+@@ -438,20 +439,19 @@ class HTTPResponse(io.IOBase):
|
|
|
|
+
|
|
|
|
+ except SocketTimeout:
|
|
|
|
+ # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
|
|
|
|
+ # there is yet no clean way to get at it from this context.
|
|
|
|
+ raise ReadTimeoutError(self._pool, None, "Read timed out.")
|
|
|
|
+
|
|
|
|
+ except BaseSSLError as e:
|
|
|
|
+ # FIXME: Is there a better way to differentiate between SSLErrors?
|
|
|
|
+- if "read operation timed out" not in str(e): # Defensive:
|
|
|
|
+- # This shouldn't happen but just in case we're missing an edge
|
|
|
|
+- # case, let's avoid swallowing SSL errors.
|
|
|
|
+- raise
|
|
|
|
++ if "read operation timed out" not in str(e):
|
|
|
|
++ # SSL errors related to framing/MAC get wrapped and reraised here
|
|
|
|
++ raise SSLError(e)
|
|
|
|
+
|
|
|
|
+ raise ReadTimeoutError(self._pool, None, "Read timed out.")
|
|
|
|
+
|
|
|
|
+ except (HTTPException, SocketError) as e:
|
|
|
|
+ # This includes IncompleteRead.
|
|
|
|
+ raise ProtocolError("Connection broken: %r" % e, e)
|
|
|
|
+
|
|
|
|
+ # If no exception is thrown, we should avoid cleaning up
|
|
|
|
+@@ -475,17 +475,17 @@ class HTTPResponse(io.IOBase):
|
|
|
|
+
|
|
|
|
+ # If we hold the original response but it's closed now, we should
|
|
|
|
+ # return the connection back to the pool.
|
|
|
|
+ if self._original_response and self._original_response.isclosed():
|
|
|
|
+ self.release_conn()
|
|
|
|
+
|
|
|
|
+ def read(self, amt=None, decode_content=None, cache_content=False):
|
|
|
|
+ """
|
|
|
|
+- Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
|
|
|
|
++ Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
|
|
|
|
+ parameters: ``decode_content`` and ``cache_content``.
|
|
|
|
+
|
|
|
|
+ :param amt:
|
|
|
|
+ How much of the content to read. If specified, caching is skipped
|
|
|
|
+ because it doesn't make sense to cache partial content as the full
|
|
|
|
+ response.
|
|
|
|
+
|
|
|
|
+ :param decode_content:
|
|
|
|
+@@ -576,46 +576,46 @@ class HTTPResponse(io.IOBase):
|
|
|
|
+ data = self.read(amt=amt, decode_content=decode_content)
|
|
|
|
+
|
|
|
|
+ if data:
|
|
|
|
+ yield data
|
|
|
|
+
|
|
|
|
+ @classmethod
|
|
|
|
+ def from_httplib(ResponseCls, r, **response_kw):
|
|
|
|
+ """
|
|
|
|
+- Given an :class:`httplib.HTTPResponse` instance ``r``, return a
|
|
|
|
++ Given an :class:`http.client.HTTPResponse` instance ``r``, return a
|
|
|
|
+ corresponding :class:`urllib3.response.HTTPResponse` object.
|
|
|
|
+
|
|
|
|
+ Remaining parameters are passed to the HTTPResponse constructor, along
|
|
|
|
+ with ``original_response=r``.
|
|
|
|
+ """
|
|
|
|
+ headers = r.msg
|
|
|
|
+
|
|
|
|
+ if not isinstance(headers, HTTPHeaderDict):
|
|
|
|
+- if PY3:
|
|
|
|
+- headers = HTTPHeaderDict(headers.items())
|
|
|
|
+- else:
|
|
|
|
++ if six.PY2:
|
|
|
|
+ # Python 2.7
|
|
|
|
+ headers = HTTPHeaderDict.from_httplib(headers)
|
|
|
|
++ else:
|
|
|
|
++ headers = HTTPHeaderDict(headers.items())
|
|
|
|
+
|
|
|
|
+ # HTTPResponse objects in Python 3 don't have a .strict attribute
|
|
|
|
+ strict = getattr(r, "strict", 0)
|
|
|
|
+ resp = ResponseCls(
|
|
|
|
+ body=r,
|
|
|
|
+ headers=headers,
|
|
|
|
+ status=r.status,
|
|
|
|
+ version=r.version,
|
|
|
|
+ reason=r.reason,
|
|
|
|
+ strict=strict,
|
|
|
|
+ original_response=r,
|
|
|
|
+ **response_kw
|
|
|
|
+ )
|
|
|
|
+ return resp
|
|
|
|
+
|
|
|
|
+- # Backwards-compatibility methods for httplib.HTTPResponse
|
|
|
|
++ # Backwards-compatibility methods for http.client.HTTPResponse
|
|
|
|
+ def getheaders(self):
|
|
|
|
+ return self.headers
|
|
|
|
+
|
|
|
|
+ def getheader(self, name, default=None):
|
|
|
|
+ return self.headers.get(name, default)
|
|
|
|
+
|
|
|
|
+ # Backwards compatibility for http.cookiejar
|
|
|
|
+ def info(self):
|
|
|
|
+@@ -675,35 +675,35 @@ class HTTPResponse(io.IOBase):
|
|
|
|
+ return 0
|
|
|
|
+ else:
|
|
|
|
+ b[: len(temp)] = temp
|
|
|
|
+ return len(temp)
|
|
|
|
+
|
|
|
|
+ def supports_chunked_reads(self):
|
|
|
|
+ """
|
|
|
|
+ Checks if the underlying file-like object looks like a
|
|
|
|
+- httplib.HTTPResponse object. We do this by testing for the fp
|
|
|
|
+- attribute. If it is present we assume it returns raw chunks as
|
|
|
|
++ :class:`http.client.HTTPResponse` object. We do this by testing for
|
|
|
|
++ the fp attribute. If it is present we assume it returns raw chunks as
|
|
|
|
+ processed by read_chunked().
|
|
|
|
+ """
|
|
|
|
+ return hasattr(self._fp, "fp")
|
|
|
|
+
|
|
|
|
+ def _update_chunk_length(self):
|
|
|
|
+ # First, we'll figure out length of a chunk and then
|
|
|
|
+ # we'll try to read it from socket.
|
|
|
|
+ if self.chunk_left is not None:
|
|
|
|
+ return
|
|
|
|
+ line = self._fp.fp.readline()
|
|
|
|
+ line = line.split(b";", 1)[0]
|
|
|
|
+ try:
|
|
|
|
+ self.chunk_left = int(line, 16)
|
|
|
|
+ except ValueError:
|
|
|
|
+ # Invalid chunked protocol response, abort.
|
|
|
|
+ self.close()
|
|
|
|
+- raise httplib.IncompleteRead(line)
|
|
|
|
++ raise InvalidChunkLength(self, line)
|
|
|
|
+
|
|
|
|
+ def _handle_chunk(self, amt):
|
|
|
|
+ returned_chunk = None
|
|
|
|
+ if amt is None:
|
|
|
|
+ chunk = self._fp._safe_read(self.chunk_left)
|
|
|
|
+ returned_chunk = chunk
|
|
|
|
+ self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
|
|
|
|
+ self.chunk_left = None
|
|
|
|
+@@ -740,17 +740,17 @@ class HTTPResponse(io.IOBase):
|
|
|
|
+ # FIXME: Rewrite this method and make it a class with a better structured logic.
|
|
|
|
+ if not self.chunked:
|
|
|
|
+ raise ResponseNotChunked(
|
|
|
|
+ "Response is not chunked. "
|
|
|
|
+ "Header 'transfer-encoding: chunked' is missing."
|
|
|
|
+ )
|
|
|
|
+ if not self.supports_chunked_reads():
|
|
|
|
+ raise BodyNotHttplibCompatible(
|
|
|
|
+- "Body should be httplib.HTTPResponse like. "
|
|
|
|
++ "Body should be http.client.HTTPResponse like. "
|
|
|
|
+ "It should have have an fp attribute which returns raw chunks."
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
+ with self._error_catcher():
|
|
|
|
+ # Don't bother reading the body of a HEAD request.
|
|
|
|
+ if self._original_response and is_response_to_head(self._original_response):
|
|
|
|
+ self._original_response.close()
|
|
|
|
+ return
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/util/__init__.py b/third_party/python/urllib3/urllib3/util/__init__.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/util/__init__.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/util/__init__.py
|
|
|
|
+@@ -1,46 +1,49 @@
|
|
|
|
+ from __future__ import absolute_import
|
|
|
|
+
|
|
|
|
+ # For backwards compatibility, provide imports that used to be here.
|
|
|
|
+ from .connection import is_connection_dropped
|
|
|
|
+-from .request import make_headers
|
|
|
|
++from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers
|
|
|
|
+ from .response import is_fp_closed
|
|
|
|
++from .retry import Retry
|
|
|
|
+ from .ssl_ import (
|
|
|
|
+- SSLContext,
|
|
|
|
++ ALPN_PROTOCOLS,
|
|
|
|
+ HAS_SNI,
|
|
|
|
+ IS_PYOPENSSL,
|
|
|
|
+ IS_SECURETRANSPORT,
|
|
|
|
++ PROTOCOL_TLS,
|
|
|
|
++ SSLContext,
|
|
|
|
+ assert_fingerprint,
|
|
|
|
+ resolve_cert_reqs,
|
|
|
|
+ resolve_ssl_version,
|
|
|
|
+ ssl_wrap_socket,
|
|
|
|
+- PROTOCOL_TLS,
|
|
|
|
+ )
|
|
|
|
+-from .timeout import current_time, Timeout
|
|
|
|
+-
|
|
|
|
+-from .retry import Retry
|
|
|
|
+-from .url import get_host, parse_url, split_first, Url
|
|
|
|
++from .timeout import Timeout, current_time
|
|
|
|
++from .url import Url, get_host, parse_url, split_first
|
|
|
|
+ from .wait import wait_for_read, wait_for_write
|
|
|
|
+
|
|
|
|
+ __all__ = (
|
|
|
|
+ "HAS_SNI",
|
|
|
|
+ "IS_PYOPENSSL",
|
|
|
|
+ "IS_SECURETRANSPORT",
|
|
|
|
+ "SSLContext",
|
|
|
|
+ "PROTOCOL_TLS",
|
|
|
|
++ "ALPN_PROTOCOLS",
|
|
|
|
+ "Retry",
|
|
|
|
+ "Timeout",
|
|
|
|
+ "Url",
|
|
|
|
+ "assert_fingerprint",
|
|
|
|
+ "current_time",
|
|
|
|
+ "is_connection_dropped",
|
|
|
|
+ "is_fp_closed",
|
|
|
|
+ "get_host",
|
|
|
|
+ "parse_url",
|
|
|
|
+ "make_headers",
|
|
|
|
+ "resolve_cert_reqs",
|
|
|
|
+ "resolve_ssl_version",
|
|
|
|
+ "split_first",
|
|
|
|
+ "ssl_wrap_socket",
|
|
|
|
+ "wait_for_read",
|
|
|
|
+ "wait_for_write",
|
|
|
|
++ "SKIP_HEADER",
|
|
|
|
++ "SKIPPABLE_HEADERS",
|
|
|
|
+ )
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/util/connection.py b/third_party/python/urllib3/urllib3/util/connection.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/util/connection.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/util/connection.py
|
|
|
|
+@@ -1,20 +1,25 @@
|
|
|
|
+ from __future__ import absolute_import
|
|
|
|
++
|
|
|
|
+ import socket
|
|
|
|
++
|
|
|
|
++from urllib3.exceptions import LocationParseError
|
|
|
|
++
|
|
|
|
++from ..contrib import _appengine_environ
|
|
|
|
++from ..packages import six
|
|
|
|
+ from .wait import NoWayToWaitForSocketError, wait_for_read
|
|
|
|
+-from ..contrib import _appengine_environ
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ def is_connection_dropped(conn): # Platform-specific
|
|
|
|
+ """
|
|
|
|
+ Returns True if the connection is dropped and should be closed.
|
|
|
|
+
|
|
|
|
+ :param conn:
|
|
|
|
+- :class:`httplib.HTTPConnection` object.
|
|
|
|
++ :class:`http.client.HTTPConnection` object.
|
|
|
|
+
|
|
|
|
+ Note: For platforms like AppEngine, this will always return ``False`` to
|
|
|
|
+ let the platform handle connection recycling transparently for us.
|
|
|
|
+ """
|
|
|
|
+ sock = getattr(conn, "sock", False)
|
|
|
|
+ if sock is False: # Platform-specific: AppEngine
|
|
|
|
+ return False
|
|
|
|
+ if sock is None: # Connection already closed (such as by httplib).
|
|
|
|
+@@ -37,32 +42,39 @@ def create_connection(
|
|
|
|
+ socket_options=None,
|
|
|
|
+ ):
|
|
|
|
+ """Connect to *address* and return the socket object.
|
|
|
|
+
|
|
|
|
+ Convenience function. Connect to *address* (a 2-tuple ``(host,
|
|
|
|
+ port)``) and return the socket object. Passing the optional
|
|
|
|
+ *timeout* parameter will set the timeout on the socket instance
|
|
|
|
+ before attempting to connect. If no *timeout* is supplied, the
|
|
|
|
+- global default timeout setting returned by :func:`getdefaulttimeout`
|
|
|
|
++ global default timeout setting returned by :func:`socket.getdefaulttimeout`
|
|
|
|
+ is used. If *source_address* is set it must be a tuple of (host, port)
|
|
|
|
+ for the socket to bind as a source address before making the connection.
|
|
|
|
+ An host of '' or port 0 tells the OS to use the default.
|
|
|
|
+ """
|
|
|
|
+
|
|
|
|
+ host, port = address
|
|
|
|
+ if host.startswith("["):
|
|
|
|
+ host = host.strip("[]")
|
|
|
|
+ err = None
|
|
|
|
+
|
|
|
|
+ # Using the value from allowed_gai_family() in the context of getaddrinfo lets
|
|
|
|
+ # us select whether to work with IPv4 DNS records, IPv6 records, or both.
|
|
|
|
+ # The original create_connection function always returns all records.
|
|
|
|
+ family = allowed_gai_family()
|
|
|
|
+
|
|
|
|
++ try:
|
|
|
|
++ host.encode("idna")
|
|
|
|
++ except UnicodeError:
|
|
|
|
++ return six.raise_from(
|
|
|
|
++ LocationParseError(u"'%s', label empty or too long" % host), None
|
|
|
|
++ )
|
|
|
|
++
|
|
|
|
+ for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
|
|
|
|
+ af, socktype, proto, canonname, sa = res
|
|
|
|
+ sock = None
|
|
|
|
+ try:
|
|
|
|
+ sock = socket.socket(af, socktype, proto)
|
|
|
|
+
|
|
|
|
+ # If provided, set socket level options before connecting.
|
|
|
|
+ _set_socket_options(sock, socket_options)
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/util/proxy.py b/third_party/python/urllib3/urllib3/util/proxy.py
|
|
|
|
+new file mode 100644
|
|
|
|
+--- /dev/null
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/util/proxy.py
|
|
|
|
+@@ -0,0 +1,56 @@
|
|
|
|
++from .ssl_ import create_urllib3_context, resolve_cert_reqs, resolve_ssl_version
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++def connection_requires_http_tunnel(
|
|
|
|
++ proxy_url=None, proxy_config=None, destination_scheme=None
|
|
|
|
++):
|
|
|
|
++ """
|
|
|
|
++ Returns True if the connection requires an HTTP CONNECT through the proxy.
|
|
|
|
++
|
|
|
|
++ :param URL proxy_url:
|
|
|
|
++ URL of the proxy.
|
|
|
|
++ :param ProxyConfig proxy_config:
|
|
|
|
++ Proxy configuration from poolmanager.py
|
|
|
|
++ :param str destination_scheme:
|
|
|
|
++ The scheme of the destination. (i.e https, http, etc)
|
|
|
|
++ """
|
|
|
|
++ # If we're not using a proxy, no way to use a tunnel.
|
|
|
|
++ if proxy_url is None:
|
|
|
|
++ return False
|
|
|
|
++
|
|
|
|
++ # HTTP destinations never require tunneling, we always forward.
|
|
|
|
++ if destination_scheme == "http":
|
|
|
|
++ return False
|
|
|
|
++
|
|
|
|
++ # Support for forwarding with HTTPS proxies and HTTPS destinations.
|
|
|
|
++ if (
|
|
|
|
++ proxy_url.scheme == "https"
|
|
|
|
++ and proxy_config
|
|
|
|
++ and proxy_config.use_forwarding_for_https
|
|
|
|
++ ):
|
|
|
|
++ return False
|
|
|
|
++
|
|
|
|
++ # Otherwise always use a tunnel.
|
|
|
|
++ return True
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++def create_proxy_ssl_context(
|
|
|
|
++ ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None
|
|
|
|
++):
|
|
|
|
++ """
|
|
|
|
++ Generates a default proxy ssl context if one hasn't been provided by the
|
|
|
|
++ user.
|
|
|
|
++ """
|
|
|
|
++ ssl_context = create_urllib3_context(
|
|
|
|
++ ssl_version=resolve_ssl_version(ssl_version),
|
|
|
|
++ cert_reqs=resolve_cert_reqs(cert_reqs),
|
|
|
|
++ )
|
|
|
|
++ if (
|
|
|
|
++ not ca_certs
|
|
|
|
++ and not ca_cert_dir
|
|
|
|
++ and not ca_cert_data
|
|
|
|
++ and hasattr(ssl_context, "load_default_certs")
|
|
|
|
++ ):
|
|
|
|
++ ssl_context.load_default_certs()
|
|
|
|
++
|
|
|
|
++ return ssl_context
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/util/queue.py b/third_party/python/urllib3/urllib3/util/queue.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/util/queue.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/util/queue.py
|
|
|
|
+@@ -1,9 +1,10 @@
|
|
|
|
+ import collections
|
|
|
|
++
|
|
|
|
+ from ..packages import six
|
|
|
|
+ from ..packages.six.moves import queue
|
|
|
|
+
|
|
|
|
+ if six.PY2:
|
|
|
|
+ # Queue is imported for side effects on MS Windows. See issue #229.
|
|
|
|
+ import Queue as _unused_module_Queue # noqa: F401
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/util/request.py b/third_party/python/urllib3/urllib3/util/request.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/util/request.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/util/request.py
|
|
|
|
+@@ -1,13 +1,21 @@
|
|
|
|
+ from __future__ import absolute_import
|
|
|
|
++
|
|
|
|
+ from base64 import b64encode
|
|
|
|
+
|
|
|
|
++from ..exceptions import UnrewindableBodyError
|
|
|
|
+ from ..packages.six import b, integer_types
|
|
|
|
+-from ..exceptions import UnrewindableBodyError
|
|
|
|
++
|
|
|
|
++# Pass as a value within ``headers`` to skip
|
|
|
|
++# emitting some HTTP headers that are added automatically.
|
|
|
|
++# The only headers that are supported are ``Accept-Encoding``,
|
|
|
|
++# ``Host``, and ``User-Agent``.
|
|
|
|
++SKIP_HEADER = "@@@SKIP_HEADER@@@"
|
|
|
|
++SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"])
|
|
|
|
+
|
|
|
|
+ ACCEPT_ENCODING = "gzip,deflate"
|
|
|
|
+ try:
|
|
|
|
+ import brotli as _unused_module_brotli # noqa: F401
|
|
|
|
+ except ImportError:
|
|
|
|
+ pass
|
|
|
|
+ else:
|
|
|
|
+ ACCEPT_ENCODING += ",br"
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/util/response.py b/third_party/python/urllib3/urllib3/util/response.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/util/response.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/util/response.py
|
|
|
|
+@@ -1,12 +1,14 @@
|
|
|
|
+ from __future__ import absolute_import
|
|
|
|
+-from ..packages.six.moves import http_client as httplib
|
|
|
|
++
|
|
|
|
++from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect
|
|
|
|
+
|
|
|
|
+ from ..exceptions import HeaderParsingError
|
|
|
|
++from ..packages.six.moves import http_client as httplib
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ def is_fp_closed(obj):
|
|
|
|
+ """
|
|
|
|
+ Checks whether a given file-like object is closed.
|
|
|
|
+
|
|
|
|
+ :param obj:
|
|
|
|
+ The file-like object to check.
|
|
|
|
+@@ -37,18 +39,17 @@ def is_fp_closed(obj):
|
|
|
|
+
|
|
|
|
+ def assert_header_parsing(headers):
|
|
|
|
+ """
|
|
|
|
+ Asserts whether all headers have been successfully parsed.
|
|
|
|
+ Extracts encountered errors from the result of parsing headers.
|
|
|
|
+
|
|
|
|
+ Only works on Python 3.
|
|
|
|
+
|
|
|
|
+- :param headers: Headers to verify.
|
|
|
|
+- :type headers: `httplib.HTTPMessage`.
|
|
|
|
++ :param http.client.HTTPMessage headers: Headers to verify.
|
|
|
|
+
|
|
|
|
+ :raises urllib3.exceptions.HeaderParsingError:
|
|
|
|
+ If parsing errors are found.
|
|
|
|
+ """
|
|
|
|
+
|
|
|
|
+ # This will fail silently if we pass in the wrong kind of parameter.
|
|
|
|
+ # To make debugging easier add an explicit check.
|
|
|
|
+ if not isinstance(headers, httplib.HTTPMessage):
|
|
|
|
+@@ -61,26 +62,46 @@ def assert_header_parsing(headers):
|
|
|
|
+ if get_payload:
|
|
|
|
+ # get_payload is actually email.message.Message.get_payload;
|
|
|
|
+ # we're only interested in the result if it's not a multipart message
|
|
|
|
+ if not headers.is_multipart():
|
|
|
|
+ payload = get_payload()
|
|
|
|
+
|
|
|
|
+ if isinstance(payload, (bytes, str)):
|
|
|
|
+ unparsed_data = payload
|
|
|
|
++ if defects:
|
|
|
|
++ # httplib is assuming a response body is available
|
|
|
|
++ # when parsing headers even when httplib only sends
|
|
|
|
++ # header data to parse_headers() This results in
|
|
|
|
++ # defects on multipart responses in particular.
|
|
|
|
++ # See: https://github.com/urllib3/urllib3/issues/800
|
|
|
|
++
|
|
|
|
++ # So we ignore the following defects:
|
|
|
|
++ # - StartBoundaryNotFoundDefect:
|
|
|
|
++ # The claimed start boundary was never found.
|
|
|
|
++ # - MultipartInvariantViolationDefect:
|
|
|
|
++ # A message claimed to be a multipart but no subparts were found.
|
|
|
|
++ defects = [
|
|
|
|
++ defect
|
|
|
|
++ for defect in defects
|
|
|
|
++ if not isinstance(
|
|
|
|
++ defect, (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect)
|
|
|
|
++ )
|
|
|
|
++ ]
|
|
|
|
+
|
|
|
|
+ if defects or unparsed_data:
|
|
|
|
+ raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ def is_response_to_head(response):
|
|
|
|
+ """
|
|
|
|
+ Checks whether the request of a response has been a HEAD-request.
|
|
|
|
+ Handles the quirks of AppEngine.
|
|
|
|
+
|
|
|
|
+- :param conn:
|
|
|
|
+- :type conn: :class:`httplib.HTTPResponse`
|
|
|
|
++ :param http.client.HTTPResponse response:
|
|
|
|
++ Response to check if the originating request
|
|
|
|
++ used 'HEAD' as a method.
|
|
|
|
+ """
|
|
|
|
+ # FIXME: Can we do this somehow without accessing private httplib _method?
|
|
|
|
+ method = response._method
|
|
|
|
+ if isinstance(method, int): # Platform-specific: Appengine
|
|
|
|
+ return method == 3
|
|
|
|
+ return method.upper() == "HEAD"
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/util/retry.py b/third_party/python/urllib3/urllib3/util/retry.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/util/retry.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/util/retry.py
|
|
|
|
+@@ -1,39 +1,83 @@
|
|
|
|
+ from __future__ import absolute_import
|
|
|
|
++
|
|
|
|
++import email
|
|
|
|
++import logging
|
|
|
|
++import re
|
|
|
|
+ import time
|
|
|
|
+-import logging
|
|
|
|
++import warnings
|
|
|
|
+ from collections import namedtuple
|
|
|
|
+ from itertools import takewhile
|
|
|
|
+-import email
|
|
|
|
+-import re
|
|
|
|
+
|
|
|
|
+ from ..exceptions import (
|
|
|
|
+ ConnectTimeoutError,
|
|
|
|
++ InvalidHeader,
|
|
|
|
+ MaxRetryError,
|
|
|
|
+ ProtocolError,
|
|
|
|
++ ProxyError,
|
|
|
|
+ ReadTimeoutError,
|
|
|
|
+ ResponseError,
|
|
|
|
+- InvalidHeader,
|
|
|
|
+- ProxyError,
|
|
|
|
+ )
|
|
|
|
+ from ..packages import six
|
|
|
|
+
|
|
|
|
+-
|
|
|
|
+ log = logging.getLogger(__name__)
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ # Data structure for representing the metadata of requests that result in a retry.
|
|
|
|
+ RequestHistory = namedtuple(
|
|
|
|
+ "RequestHistory", ["method", "url", "error", "status", "redirect_location"]
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
+
|
|
|
|
++# TODO: In v2 we can remove this sentinel and metaclass with deprecated options.
|
|
|
|
++_Default = object()
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++class _RetryMeta(type):
|
|
|
|
++ @property
|
|
|
|
++ def DEFAULT_METHOD_WHITELIST(cls):
|
|
|
|
++ warnings.warn(
|
|
|
|
++ "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
|
|
|
|
++ "will be removed in v2.0. Use 'Retry.DEFAULT_METHODS_ALLOWED' instead",
|
|
|
|
++ DeprecationWarning,
|
|
|
|
++ )
|
|
|
|
++ return cls.DEFAULT_ALLOWED_METHODS
|
|
|
|
++
|
|
|
|
++ @DEFAULT_METHOD_WHITELIST.setter
|
|
|
|
++ def DEFAULT_METHOD_WHITELIST(cls, value):
|
|
|
|
++ warnings.warn(
|
|
|
|
++ "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
|
|
|
|
++ "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead",
|
|
|
|
++ DeprecationWarning,
|
|
|
|
++ )
|
|
|
|
++ cls.DEFAULT_ALLOWED_METHODS = value
|
|
|
|
++
|
|
|
|
++ @property
|
|
|
|
++ def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls):
|
|
|
|
++ warnings.warn(
|
|
|
|
++ "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
|
|
|
|
++ "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
|
|
|
|
++ DeprecationWarning,
|
|
|
|
++ )
|
|
|
|
++ return cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
|
|
|
|
++
|
|
|
|
++ @DEFAULT_REDIRECT_HEADERS_BLACKLIST.setter
|
|
|
|
++ def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls, value):
|
|
|
|
++ warnings.warn(
|
|
|
|
++ "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
|
|
|
|
++ "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
|
|
|
|
++ DeprecationWarning,
|
|
|
|
++ )
|
|
|
|
++ cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = value
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++@six.add_metaclass(_RetryMeta)
|
|
|
|
+ class Retry(object):
|
|
|
|
+- """ Retry configuration.
|
|
|
|
++ """Retry configuration.
|
|
|
|
+
|
|
|
|
+ Each retry attempt will create a new Retry object with updated values, so
|
|
|
|
+ they can be safely reused.
|
|
|
|
+
|
|
|
|
+ Retries can be defined as a default for a pool::
|
|
|
|
+
|
|
|
|
+ retries = Retry(connect=5, read=2, redirect=5)
|
|
|
|
+ http = PoolManager(retries=retries)
|
|
|
|
+@@ -49,18 +93,17 @@ class Retry(object):
|
|
|
|
+
|
|
|
|
+ Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
|
|
|
|
+ retries are disabled, in which case the causing exception will be raised.
|
|
|
|
+
|
|
|
|
+ :param int total:
|
|
|
|
+ Total number of retries to allow. Takes precedence over other counts.
|
|
|
|
+
|
|
|
|
+ Set to ``None`` to remove this constraint and fall back on other
|
|
|
|
+- counts. It's a good idea to set this to some sensibly-high value to
|
|
|
|
+- account for unexpected edge cases and avoid infinite retry loops.
|
|
|
|
++ counts.
|
|
|
|
+
|
|
|
|
+ Set to ``0`` to fail on the first retry.
|
|
|
|
+
|
|
|
|
+ Set to ``False`` to disable and imply ``raise_on_redirect=False``.
|
|
|
|
+
|
|
|
|
+ :param int connect:
|
|
|
|
+ How many connection-related errors to retry on.
|
|
|
|
+
|
|
|
|
+@@ -91,28 +134,45 @@ class Retry(object):
|
|
|
|
+ :param int status:
|
|
|
|
+ How many times to retry on bad status codes.
|
|
|
|
+
|
|
|
|
+ These are retries made on responses, where status code matches
|
|
|
|
+ ``status_forcelist``.
|
|
|
|
+
|
|
|
|
+ Set to ``0`` to fail on the first retry of this type.
|
|
|
|
+
|
|
|
|
+- :param iterable method_whitelist:
|
|
|
|
++ :param int other:
|
|
|
|
++ How many times to retry on other errors.
|
|
|
|
++
|
|
|
|
++ Other errors are errors that are not connect, read, redirect or status errors.
|
|
|
|
++ These errors might be raised after the request was sent to the server, so the
|
|
|
|
++ request might have side-effects.
|
|
|
|
++
|
|
|
|
++ Set to ``0`` to fail on the first retry of this type.
|
|
|
|
++
|
|
|
|
++ If ``total`` is not set, it's a good idea to set this to 0 to account
|
|
|
|
++ for unexpected edge cases and avoid infinite retry loops.
|
|
|
|
++
|
|
|
|
++ :param iterable allowed_methods:
|
|
|
|
+ Set of uppercased HTTP method verbs that we should retry on.
|
|
|
|
+
|
|
|
|
+ By default, we only retry on methods which are considered to be
|
|
|
|
+ idempotent (multiple requests with the same parameters end with the
|
|
|
|
+- same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.
|
|
|
|
++ same state). See :attr:`Retry.DEFAULT_ALLOWED_METHODS`.
|
|
|
|
+
|
|
|
|
+ Set to a ``False`` value to retry on any verb.
|
|
|
|
+
|
|
|
|
++ .. warning::
|
|
|
|
++
|
|
|
|
++ Previously this parameter was named ``method_whitelist``, that
|
|
|
|
++ usage is deprecated in v1.26.0 and will be removed in v2.0.
|
|
|
|
++
|
|
|
|
+ :param iterable status_forcelist:
|
|
|
|
+ A set of integer HTTP status codes that we should force a retry on.
|
|
|
|
+- A retry is initiated if the request method is in ``method_whitelist``
|
|
|
|
++ A retry is initiated if the request method is in ``allowed_methods``
|
|
|
|
+ and the response status code is in ``status_forcelist``.
|
|
|
|
+
|
|
|
|
+ By default, this is disabled with ``None``.
|
|
|
|
+
|
|
|
|
+ :param float backoff_factor:
|
|
|
|
+ A backoff factor to apply between attempts after the second try
|
|
|
|
+ (most errors are resolved immediately by a second try without a
|
|
|
|
+ delay). urllib3 will sleep for::
|
|
|
|
+@@ -143,81 +203,123 @@ class Retry(object):
|
|
|
|
+ :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not.
|
|
|
|
+
|
|
|
|
+ :param iterable remove_headers_on_redirect:
|
|
|
|
+ Sequence of headers to remove from the request when a response
|
|
|
|
+ indicating a redirect is returned before firing off the redirected
|
|
|
|
+ request.
|
|
|
|
+ """
|
|
|
|
+
|
|
|
|
+- DEFAULT_METHOD_WHITELIST = frozenset(
|
|
|
|
++ #: Default methods to be used for ``allowed_methods``
|
|
|
|
++ DEFAULT_ALLOWED_METHODS = frozenset(
|
|
|
|
+ ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"]
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
++ #: Default status codes to be used for ``status_forcelist``
|
|
|
|
+ RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])
|
|
|
|
+
|
|
|
|
+- DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(["Authorization"])
|
|
|
|
++ #: Default headers to be used for ``remove_headers_on_redirect``
|
|
|
|
++ DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"])
|
|
|
|
+
|
|
|
|
+ #: Maximum backoff time.
|
|
|
|
+ BACKOFF_MAX = 120
|
|
|
|
+
|
|
|
|
+ def __init__(
|
|
|
|
+ self,
|
|
|
|
+ total=10,
|
|
|
|
+ connect=None,
|
|
|
|
+ read=None,
|
|
|
|
+ redirect=None,
|
|
|
|
+ status=None,
|
|
|
|
+- method_whitelist=DEFAULT_METHOD_WHITELIST,
|
|
|
|
++ other=None,
|
|
|
|
++ allowed_methods=_Default,
|
|
|
|
+ status_forcelist=None,
|
|
|
|
+ backoff_factor=0,
|
|
|
|
+ raise_on_redirect=True,
|
|
|
|
+ raise_on_status=True,
|
|
|
|
+ history=None,
|
|
|
|
+ respect_retry_after_header=True,
|
|
|
|
+- remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST,
|
|
|
|
++ remove_headers_on_redirect=_Default,
|
|
|
|
++ # TODO: Deprecated, remove in v2.0
|
|
|
|
++ method_whitelist=_Default,
|
|
|
|
+ ):
|
|
|
|
+
|
|
|
|
++ if method_whitelist is not _Default:
|
|
|
|
++ if allowed_methods is not _Default:
|
|
|
|
++ raise ValueError(
|
|
|
|
++ "Using both 'allowed_methods' and "
|
|
|
|
++ "'method_whitelist' together is not allowed. "
|
|
|
|
++ "Instead only use 'allowed_methods'"
|
|
|
|
++ )
|
|
|
|
++ warnings.warn(
|
|
|
|
++ "Using 'method_whitelist' with Retry is deprecated and "
|
|
|
|
++ "will be removed in v2.0. Use 'allowed_methods' instead",
|
|
|
|
++ DeprecationWarning,
|
|
|
|
++ )
|
|
|
|
++ allowed_methods = method_whitelist
|
|
|
|
++ if allowed_methods is _Default:
|
|
|
|
++ allowed_methods = self.DEFAULT_ALLOWED_METHODS
|
|
|
|
++ if remove_headers_on_redirect is _Default:
|
|
|
|
++ remove_headers_on_redirect = self.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
|
|
|
|
++
|
|
|
|
+ self.total = total
|
|
|
|
+ self.connect = connect
|
|
|
|
+ self.read = read
|
|
|
|
+ self.status = status
|
|
|
|
++ self.other = other
|
|
|
|
+
|
|
|
|
+ if redirect is False or total is False:
|
|
|
|
+ redirect = 0
|
|
|
|
+ raise_on_redirect = False
|
|
|
|
+
|
|
|
|
+ self.redirect = redirect
|
|
|
|
+ self.status_forcelist = status_forcelist or set()
|
|
|
|
+- self.method_whitelist = method_whitelist
|
|
|
|
++ self.allowed_methods = allowed_methods
|
|
|
|
+ self.backoff_factor = backoff_factor
|
|
|
|
+ self.raise_on_redirect = raise_on_redirect
|
|
|
|
+ self.raise_on_status = raise_on_status
|
|
|
|
+ self.history = history or tuple()
|
|
|
|
+ self.respect_retry_after_header = respect_retry_after_header
|
|
|
|
+ self.remove_headers_on_redirect = frozenset(
|
|
|
|
+ [h.lower() for h in remove_headers_on_redirect]
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
+ def new(self, **kw):
|
|
|
|
+ params = dict(
|
|
|
|
+ total=self.total,
|
|
|
|
+ connect=self.connect,
|
|
|
|
+ read=self.read,
|
|
|
|
+ redirect=self.redirect,
|
|
|
|
+ status=self.status,
|
|
|
|
+- method_whitelist=self.method_whitelist,
|
|
|
|
++ other=self.other,
|
|
|
|
+ status_forcelist=self.status_forcelist,
|
|
|
|
+ backoff_factor=self.backoff_factor,
|
|
|
|
+ raise_on_redirect=self.raise_on_redirect,
|
|
|
|
+ raise_on_status=self.raise_on_status,
|
|
|
|
+ history=self.history,
|
|
|
|
+ remove_headers_on_redirect=self.remove_headers_on_redirect,
|
|
|
|
+ respect_retry_after_header=self.respect_retry_after_header,
|
|
|
|
+ )
|
|
|
|
++
|
|
|
|
++ # TODO: If already given in **kw we use what's given to us
|
|
|
|
++ # If not given we need to figure out what to pass. We decide
|
|
|
|
++ # based on whether our class has the 'method_whitelist' property
|
|
|
|
++ # and if so we pass the deprecated 'method_whitelist' otherwise
|
|
|
|
++ # we use 'allowed_methods'. Remove in v2.0
|
|
|
|
++ if "method_whitelist" not in kw and "allowed_methods" not in kw:
|
|
|
|
++ if "method_whitelist" in self.__dict__:
|
|
|
|
++ warnings.warn(
|
|
|
|
++ "Using 'method_whitelist' with Retry is deprecated and "
|
|
|
|
++ "will be removed in v2.0. Use 'allowed_methods' instead",
|
|
|
|
++ DeprecationWarning,
|
|
|
|
++ )
|
|
|
|
++ params["method_whitelist"] = self.allowed_methods
|
|
|
|
++ else:
|
|
|
|
++ params["allowed_methods"] = self.allowed_methods
|
|
|
|
++
|
|
|
|
+ params.update(kw)
|
|
|
|
+ return type(self)(**params)
|
|
|
|
+
|
|
|
|
+ @classmethod
|
|
|
|
+ def from_int(cls, retries, redirect=True, default=None):
|
|
|
|
+ """ Backwards-compatibility for the old retries format."""
|
|
|
|
+ if retries is None:
|
|
|
|
+ retries = default if default is not None else cls.DEFAULT
|
|
|
|
+@@ -226,17 +328,17 @@ class Retry(object):
|
|
|
|
+ return retries
|
|
|
|
+
|
|
|
|
+ redirect = bool(redirect) and None
|
|
|
|
+ new_retries = cls(retries, redirect=redirect)
|
|
|
|
+ log.debug("Converted retries value: %r -> %r", retries, new_retries)
|
|
|
|
+ return new_retries
|
|
|
|
+
|
|
|
|
+ def get_backoff_time(self):
|
|
|
|
+- """ Formula for computing the current backoff
|
|
|
|
++ """Formula for computing the current backoff
|
|
|
|
+
|
|
|
|
+ :rtype: float
|
|
|
|
+ """
|
|
|
|
+ # We want to consider only the last consecutive errors sequence (Ignore redirects).
|
|
|
|
+ consecutive_errors_len = len(
|
|
|
|
+ list(
|
|
|
|
+ takewhile(lambda x: x.redirect_location is None, reversed(self.history))
|
|
|
|
+ )
|
|
|
|
+@@ -247,20 +349,27 @@ class Retry(object):
|
|
|
|
+ backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1))
|
|
|
|
+ return min(self.BACKOFF_MAX, backoff_value)
|
|
|
|
+
|
|
|
|
+ def parse_retry_after(self, retry_after):
|
|
|
|
+ # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4
|
|
|
|
+ if re.match(r"^\s*[0-9]+\s*$", retry_after):
|
|
|
|
+ seconds = int(retry_after)
|
|
|
|
+ else:
|
|
|
|
+- retry_date_tuple = email.utils.parsedate(retry_after)
|
|
|
|
++ retry_date_tuple = email.utils.parsedate_tz(retry_after)
|
|
|
|
+ if retry_date_tuple is None:
|
|
|
|
+ raise InvalidHeader("Invalid Retry-After header: %s" % retry_after)
|
|
|
|
+- retry_date = time.mktime(retry_date_tuple)
|
|
|
|
++ if retry_date_tuple[9] is None: # Python 2
|
|
|
|
++ # Assume UTC if no timezone was specified
|
|
|
|
++ # On Python2.7, parsedate_tz returns None for a timezone offset
|
|
|
|
++ # instead of 0 if no timezone is given, where mktime_tz treats
|
|
|
|
++ # a None timezone offset as local time.
|
|
|
|
++ retry_date_tuple = retry_date_tuple[:9] + (0,) + retry_date_tuple[10:]
|
|
|
|
++
|
|
|
|
++ retry_date = email.utils.mktime_tz(retry_date_tuple)
|
|
|
|
+ seconds = retry_date - time.time()
|
|
|
|
+
|
|
|
|
+ if seconds < 0:
|
|
|
|
+ seconds = 0
|
|
|
|
+
|
|
|
|
+ return seconds
|
|
|
|
+
|
|
|
|
+ def get_retry_after(self, response):
|
|
|
|
+@@ -283,56 +392,67 @@ class Retry(object):
|
|
|
|
+
|
|
|
|
+ def _sleep_backoff(self):
|
|
|
|
+ backoff = self.get_backoff_time()
|
|
|
|
+ if backoff <= 0:
|
|
|
|
+ return
|
|
|
|
+ time.sleep(backoff)
|
|
|
|
+
|
|
|
|
+ def sleep(self, response=None):
|
|
|
|
+- """ Sleep between retry attempts.
|
|
|
|
++ """Sleep between retry attempts.
|
|
|
|
+
|
|
|
|
+ This method will respect a server's ``Retry-After`` response header
|
|
|
|
+ and sleep the duration of the time requested. If that is not present, it
|
|
|
|
+ will use an exponential backoff. By default, the backoff factor is 0 and
|
|
|
|
+ this method will return immediately.
|
|
|
|
+ """
|
|
|
|
+
|
|
|
|
+ if self.respect_retry_after_header and response:
|
|
|
|
+ slept = self.sleep_for_retry(response)
|
|
|
|
+ if slept:
|
|
|
|
+ return
|
|
|
|
+
|
|
|
|
+ self._sleep_backoff()
|
|
|
|
+
|
|
|
|
+ def _is_connection_error(self, err):
|
|
|
|
+- """ Errors when we're fairly sure that the server did not receive the
|
|
|
|
++ """Errors when we're fairly sure that the server did not receive the
|
|
|
|
+ request, so it should be safe to retry.
|
|
|
|
+ """
|
|
|
|
+ if isinstance(err, ProxyError):
|
|
|
|
+ err = err.original_error
|
|
|
|
+ return isinstance(err, ConnectTimeoutError)
|
|
|
|
+
|
|
|
|
+ def _is_read_error(self, err):
|
|
|
|
+- """ Errors that occur after the request has been started, so we should
|
|
|
|
++ """Errors that occur after the request has been started, so we should
|
|
|
|
+ assume that the server began processing it.
|
|
|
|
+ """
|
|
|
|
+ return isinstance(err, (ReadTimeoutError, ProtocolError))
|
|
|
|
+
|
|
|
|
+ def _is_method_retryable(self, method):
|
|
|
|
+- """ Checks if a given HTTP method should be retried upon, depending if
|
|
|
|
+- it is included on the method whitelist.
|
|
|
|
++ """Checks if a given HTTP method should be retried upon, depending if
|
|
|
|
++ it is included in the allowed_methods
|
|
|
|
+ """
|
|
|
|
+- if self.method_whitelist and method.upper() not in self.method_whitelist:
|
|
|
|
++ # TODO: For now favor if the Retry implementation sets its own method_whitelist
|
|
|
|
++ # property outside of our constructor to avoid breaking custom implementations.
|
|
|
|
++ if "method_whitelist" in self.__dict__:
|
|
|
|
++ warnings.warn(
|
|
|
|
++ "Using 'method_whitelist' with Retry is deprecated and "
|
|
|
|
++ "will be removed in v2.0. Use 'allowed_methods' instead",
|
|
|
|
++ DeprecationWarning,
|
|
|
|
++ )
|
|
|
|
++ allowed_methods = self.method_whitelist
|
|
|
|
++ else:
|
|
|
|
++ allowed_methods = self.allowed_methods
|
|
|
|
++
|
|
|
|
++ if allowed_methods and method.upper() not in allowed_methods:
|
|
|
|
+ return False
|
|
|
|
+-
|
|
|
|
+ return True
|
|
|
|
+
|
|
|
|
+ def is_retry(self, method, status_code, has_retry_after=False):
|
|
|
|
+- """ Is this method/status code retryable? (Based on whitelists and control
|
|
|
|
++ """Is this method/status code retryable? (Based on allowlists and control
|
|
|
|
+ variables such as the number of total retries to allow, whether to
|
|
|
|
+ respect the Retry-After header, whether this header is present, and
|
|
|
|
+ whether the returned status code is on the list of status codes to
|
|
|
|
+ be retried upon on the presence of the aforementioned header)
|
|
|
|
+ """
|
|
|
|
+ if not self._is_method_retryable(method):
|
|
|
|
+ return False
|
|
|
|
+
|
|
|
|
+@@ -343,33 +463,40 @@ class Retry(object):
|
|
|
|
+ self.total
|
|
|
|
+ and self.respect_retry_after_header
|
|
|
|
+ and has_retry_after
|
|
|
|
+ and (status_code in self.RETRY_AFTER_STATUS_CODES)
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
+ def is_exhausted(self):
|
|
|
|
+ """ Are we out of retries? """
|
|
|
|
+- retry_counts = (self.total, self.connect, self.read, self.redirect, self.status)
|
|
|
|
++ retry_counts = (
|
|
|
|
++ self.total,
|
|
|
|
++ self.connect,
|
|
|
|
++ self.read,
|
|
|
|
++ self.redirect,
|
|
|
|
++ self.status,
|
|
|
|
++ self.other,
|
|
|
|
++ )
|
|
|
|
+ retry_counts = list(filter(None, retry_counts))
|
|
|
|
+ if not retry_counts:
|
|
|
|
+ return False
|
|
|
|
+
|
|
|
|
+ return min(retry_counts) < 0
|
|
|
|
+
|
|
|
|
+ def increment(
|
|
|
|
+ self,
|
|
|
|
+ method=None,
|
|
|
|
+ url=None,
|
|
|
|
+ response=None,
|
|
|
|
+ error=None,
|
|
|
|
+ _pool=None,
|
|
|
|
+ _stacktrace=None,
|
|
|
|
+ ):
|
|
|
|
+- """ Return a new Retry object with incremented retry counters.
|
|
|
|
++ """Return a new Retry object with incremented retry counters.
|
|
|
|
+
|
|
|
|
+ :param response: A response object, or None, if the server did not
|
|
|
|
+ return a response.
|
|
|
|
+ :type response: :class:`~urllib3.response.HTTPResponse`
|
|
|
|
+ :param Exception error: An error encountered during the request, or
|
|
|
|
+ None if the response was received successfully.
|
|
|
|
+
|
|
|
|
+ :return: A new ``Retry`` object.
|
|
|
|
+@@ -381,16 +508,17 @@ class Retry(object):
|
|
|
|
+ total = self.total
|
|
|
|
+ if total is not None:
|
|
|
|
+ total -= 1
|
|
|
|
+
|
|
|
|
+ connect = self.connect
|
|
|
|
+ read = self.read
|
|
|
|
+ redirect = self.redirect
|
|
|
|
+ status_count = self.status
|
|
|
|
++ other = self.other
|
|
|
|
+ cause = "unknown"
|
|
|
|
+ status = None
|
|
|
|
+ redirect_location = None
|
|
|
|
+
|
|
|
|
+ if error and self._is_connection_error(error):
|
|
|
|
+ # Connect retry?
|
|
|
|
+ if connect is False:
|
|
|
|
+ raise six.reraise(type(error), error, _stacktrace)
|
|
|
|
+@@ -399,27 +527,32 @@ class Retry(object):
|
|
|
|
+
|
|
|
|
+ elif error and self._is_read_error(error):
|
|
|
|
+ # Read retry?
|
|
|
|
+ if read is False or not self._is_method_retryable(method):
|
|
|
|
+ raise six.reraise(type(error), error, _stacktrace)
|
|
|
|
+ elif read is not None:
|
|
|
|
+ read -= 1
|
|
|
|
+
|
|
|
|
++ elif error:
|
|
|
|
++ # Other retry?
|
|
|
|
++ if other is not None:
|
|
|
|
++ other -= 1
|
|
|
|
++
|
|
|
|
+ elif response and response.get_redirect_location():
|
|
|
|
+ # Redirect retry?
|
|
|
|
+ if redirect is not None:
|
|
|
|
+ redirect -= 1
|
|
|
|
+ cause = "too many redirects"
|
|
|
|
+ redirect_location = response.get_redirect_location()
|
|
|
|
+ status = response.status
|
|
|
|
+
|
|
|
|
+ else:
|
|
|
|
+ # Incrementing because of a server error like a 500 in
|
|
|
|
+- # status_forcelist and a the given method is in the whitelist
|
|
|
|
++ # status_forcelist and the given method is in the allowed_methods
|
|
|
|
+ cause = ResponseError.GENERIC_ERROR
|
|
|
|
+ if response and response.status:
|
|
|
|
+ if status_count is not None:
|
|
|
|
+ status_count -= 1
|
|
|
|
+ cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status)
|
|
|
|
+ status = response.status
|
|
|
|
+
|
|
|
|
+ history = self.history + (
|
|
|
|
+@@ -427,27 +560,42 @@ class Retry(object):
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
+ new_retry = self.new(
|
|
|
|
+ total=total,
|
|
|
|
+ connect=connect,
|
|
|
|
+ read=read,
|
|
|
|
+ redirect=redirect,
|
|
|
|
+ status=status_count,
|
|
|
|
++ other=other,
|
|
|
|
+ history=history,
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
+ if new_retry.is_exhausted():
|
|
|
|
+ raise MaxRetryError(_pool, url, error or ResponseError(cause))
|
|
|
|
+
|
|
|
|
+ log.debug("Incremented Retry for (url='%s'): %r", url, new_retry)
|
|
|
|
+
|
|
|
|
+ return new_retry
|
|
|
|
+
|
|
|
|
+ def __repr__(self):
|
|
|
|
+ return (
|
|
|
|
+ "{cls.__name__}(total={self.total}, connect={self.connect}, "
|
|
|
|
+ "read={self.read}, redirect={self.redirect}, status={self.status})"
|
|
|
|
+ ).format(cls=type(self), self=self)
|
|
|
|
+
|
|
|
|
++ def __getattr__(self, item):
|
|
|
|
++ if item == "method_whitelist":
|
|
|
|
++ # TODO: Remove this deprecated alias in v2.0
|
|
|
|
++ warnings.warn(
|
|
|
|
++ "Using 'method_whitelist' with Retry is deprecated and "
|
|
|
|
++ "will be removed in v2.0. Use 'allowed_methods' instead",
|
|
|
|
++ DeprecationWarning,
|
|
|
|
++ )
|
|
|
|
++ return self.allowed_methods
|
|
|
|
++ try:
|
|
|
|
++ return getattr(super(Retry, self), item)
|
|
|
|
++ except AttributeError:
|
|
|
|
++ return getattr(Retry, item)
|
|
|
|
++
|
|
|
|
+
|
|
|
|
+ # For backwards compatibility (equivalent to pre-v1.9):
|
|
|
|
+ Retry.DEFAULT = Retry(3)
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/util/ssl_.py b/third_party/python/urllib3/urllib3/util/ssl_.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/util/ssl_.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/util/ssl_.py
|
|
|
|
+@@ -1,50 +1,58 @@
|
|
|
|
+ from __future__ import absolute_import
|
|
|
|
+-import errno
|
|
|
|
++
|
|
|
|
++import hmac
|
|
|
|
++import os
|
|
|
|
++import sys
|
|
|
|
+ import warnings
|
|
|
|
+-import hmac
|
|
|
|
+-import sys
|
|
|
|
+-
|
|
|
|
+ from binascii import hexlify, unhexlify
|
|
|
|
+ from hashlib import md5, sha1, sha256
|
|
|
|
+
|
|
|
|
+-from .url import IPV4_RE, BRACELESS_IPV6_ADDRZ_RE
|
|
|
|
+-from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning
|
|
|
|
++from ..exceptions import (
|
|
|
|
++ InsecurePlatformWarning,
|
|
|
|
++ ProxySchemeUnsupported,
|
|
|
|
++ SNIMissingWarning,
|
|
|
|
++ SSLError,
|
|
|
|
++)
|
|
|
|
+ from ..packages import six
|
|
|
|
+-
|
|
|
|
++from .url import BRACELESS_IPV6_ADDRZ_RE, IPV4_RE
|
|
|
|
+
|
|
|
|
+ SSLContext = None
|
|
|
|
++SSLTransport = None
|
|
|
|
+ HAS_SNI = False
|
|
|
|
+ IS_PYOPENSSL = False
|
|
|
|
+ IS_SECURETRANSPORT = False
|
|
|
|
++ALPN_PROTOCOLS = ["http/1.1"]
|
|
|
|
+
|
|
|
|
+ # Maps the length of a digest to a possible hash function producing this digest
|
|
|
|
+ HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256}
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ def _const_compare_digest_backport(a, b):
|
|
|
|
+ """
|
|
|
|
+ Compare two digests of equal length in constant time.
|
|
|
|
+
|
|
|
|
+ The digests must be of type str/bytes.
|
|
|
|
+ Returns True if the digests match, and False otherwise.
|
|
|
|
+ """
|
|
|
|
+ result = abs(len(a) - len(b))
|
|
|
|
+- for l, r in zip(bytearray(a), bytearray(b)):
|
|
|
|
+- result |= l ^ r
|
|
|
|
++ for left, right in zip(bytearray(a), bytearray(b)):
|
|
|
|
++ result |= left ^ right
|
|
|
|
+ return result == 0
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ _const_compare_digest = getattr(hmac, "compare_digest", _const_compare_digest_backport)
|
|
|
|
+
|
|
|
|
+ try: # Test for SSL features
|
|
|
|
+ import ssl
|
|
|
|
+- from ssl import wrap_socket, CERT_REQUIRED
|
|
|
|
+ from ssl import HAS_SNI # Has SNI?
|
|
|
|
++ from ssl import CERT_REQUIRED, wrap_socket
|
|
|
|
++
|
|
|
|
++ from .ssltransport import SSLTransport
|
|
|
|
+ except ImportError:
|
|
|
|
+ pass
|
|
|
|
+
|
|
|
|
+ try: # Platform-specific: Python 3.6
|
|
|
|
+ from ssl import PROTOCOL_TLS
|
|
|
|
+
|
|
|
|
+ PROTOCOL_SSLv23 = PROTOCOL_TLS
|
|
|
|
+ except ImportError:
|
|
|
|
+@@ -52,22 +60,28 @@ except ImportError:
|
|
|
|
+ from ssl import PROTOCOL_SSLv23 as PROTOCOL_TLS
|
|
|
|
+
|
|
|
|
+ PROTOCOL_SSLv23 = PROTOCOL_TLS
|
|
|
|
+ except ImportError:
|
|
|
|
+ PROTOCOL_SSLv23 = PROTOCOL_TLS = 2
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ try:
|
|
|
|
+- from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION
|
|
|
|
++ from ssl import OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3
|
|
|
|
+ except ImportError:
|
|
|
|
+ OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
|
|
|
|
+ OP_NO_COMPRESSION = 0x20000
|
|
|
|
+
|
|
|
|
+
|
|
|
|
++try: # OP_NO_TICKET was added in Python 3.6
|
|
|
|
++ from ssl import OP_NO_TICKET
|
|
|
|
++except ImportError:
|
|
|
|
++ OP_NO_TICKET = 0x4000
|
|
|
|
++
|
|
|
|
++
|
|
|
|
+ # A secure default.
|
|
|
|
+ # Sources for more information on TLS ciphers:
|
|
|
|
+ #
|
|
|
|
+ # - https://wiki.mozilla.org/Security/Server_Side_TLS
|
|
|
|
+ # - https://www.ssllabs.com/projects/best-practices/index.html
|
|
|
|
+ # - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
|
|
|
|
+ #
|
|
|
|
+ # The general intent is:
|
|
|
|
+@@ -244,17 +258,17 @@ def create_urllib3_context(
|
|
|
|
+ The desired protocol version to use. This will default to
|
|
|
|
+ PROTOCOL_SSLv23 which will negotiate the highest protocol that both
|
|
|
|
+ the server and your installation of OpenSSL support.
|
|
|
|
+ :param cert_reqs:
|
|
|
|
+ Whether to require the certificate verification. This defaults to
|
|
|
|
+ ``ssl.CERT_REQUIRED``.
|
|
|
|
+ :param options:
|
|
|
|
+ Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
|
|
|
|
+- ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``.
|
|
|
|
++ ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``.
|
|
|
|
+ :param ciphers:
|
|
|
|
+ Which cipher suites to allow the server to select.
|
|
|
|
+ :returns:
|
|
|
|
+ Constructed SSLContext object with specified options
|
|
|
|
+ :rtype: SSLContext
|
|
|
|
+ """
|
|
|
|
+ context = SSLContext(ssl_version or PROTOCOL_TLS)
|
|
|
|
+
|
|
|
|
+@@ -267,16 +281,21 @@ def create_urllib3_context(
|
|
|
|
+ options = 0
|
|
|
|
+ # SSLv2 is easily broken and is considered harmful and dangerous
|
|
|
|
+ options |= OP_NO_SSLv2
|
|
|
|
+ # SSLv3 has several problems and is now dangerous
|
|
|
|
+ options |= OP_NO_SSLv3
|
|
|
|
+ # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
|
|
|
|
+ # (issue #309)
|
|
|
|
+ options |= OP_NO_COMPRESSION
|
|
|
|
++ # TLSv1.2 only. Unless set explicitly, do not request tickets.
|
|
|
|
++ # This may save some bandwidth on wire, and although the ticket is encrypted,
|
|
|
|
++ # there is a risk associated with it being on wire,
|
|
|
|
++ # if the server is not rotating its ticketing keys properly.
|
|
|
|
++ options |= OP_NO_TICKET
|
|
|
|
+
|
|
|
|
+ context.options |= options
|
|
|
|
+
|
|
|
|
+ # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is
|
|
|
|
+ # necessary for conditional client cert authentication with TLS 1.3.
|
|
|
|
+ # The attribute is None for OpenSSL <= 1.1.0 or does not exist in older
|
|
|
|
+ # versions of Python. We only enable on Python 3.7.4+ or if certificate
|
|
|
|
+ # verification is enabled to work around Python issue #37428
|
|
|
|
+@@ -288,32 +307,41 @@ def create_urllib3_context(
|
|
|
|
+
|
|
|
|
+ context.verify_mode = cert_reqs
|
|
|
|
+ if (
|
|
|
|
+ getattr(context, "check_hostname", None) is not None
|
|
|
|
+ ): # Platform-specific: Python 3.2
|
|
|
|
+ # We do our own verification, including fingerprints and alternative
|
|
|
|
+ # hostnames. So disable it here
|
|
|
|
+ context.check_hostname = False
|
|
|
|
++
|
|
|
|
++ # Enable logging of TLS session keys via defacto standard environment variable
|
|
|
|
++ # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values.
|
|
|
|
++ if hasattr(context, "keylog_filename"):
|
|
|
|
++ sslkeylogfile = os.environ.get("SSLKEYLOGFILE")
|
|
|
|
++ if sslkeylogfile:
|
|
|
|
++ context.keylog_filename = sslkeylogfile
|
|
|
|
++
|
|
|
|
+ return context
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ def ssl_wrap_socket(
|
|
|
|
+ sock,
|
|
|
|
+ keyfile=None,
|
|
|
|
+ certfile=None,
|
|
|
|
+ cert_reqs=None,
|
|
|
|
+ ca_certs=None,
|
|
|
|
+ server_hostname=None,
|
|
|
|
+ ssl_version=None,
|
|
|
|
+ ciphers=None,
|
|
|
|
+ ssl_context=None,
|
|
|
|
+ ca_cert_dir=None,
|
|
|
|
+ key_password=None,
|
|
|
|
+ ca_cert_data=None,
|
|
|
|
++ tls_in_tls=False,
|
|
|
|
+ ):
|
|
|
|
+ """
|
|
|
|
+ All arguments except for server_hostname, ssl_context, and ca_cert_dir have
|
|
|
|
+ the same meaning as they do when using :func:`ssl.wrap_socket`.
|
|
|
|
+
|
|
|
|
+ :param server_hostname:
|
|
|
|
+ When SNI is supported, the expected hostname of the certificate
|
|
|
|
+ :param ssl_context:
|
|
|
|
+@@ -325,35 +353,31 @@ def ssl_wrap_socket(
|
|
|
|
+ A directory containing CA certificates in multiple separate files, as
|
|
|
|
+ supported by OpenSSL's -CApath flag or the capath argument to
|
|
|
|
+ SSLContext.load_verify_locations().
|
|
|
|
+ :param key_password:
|
|
|
|
+ Optional password if the keyfile is encrypted.
|
|
|
|
+ :param ca_cert_data:
|
|
|
|
+ Optional string containing CA certificates in PEM format suitable for
|
|
|
|
+ passing as the cadata parameter to SSLContext.load_verify_locations()
|
|
|
|
++ :param tls_in_tls:
|
|
|
|
++ Use SSLTransport to wrap the existing socket.
|
|
|
|
+ """
|
|
|
|
+ context = ssl_context
|
|
|
|
+ if context is None:
|
|
|
|
+ # Note: This branch of code and all the variables in it are no longer
|
|
|
|
+ # used by urllib3 itself. We should consider deprecating and removing
|
|
|
|
+ # this code.
|
|
|
|
+ context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers)
|
|
|
|
+
|
|
|
|
+ if ca_certs or ca_cert_dir or ca_cert_data:
|
|
|
|
+ try:
|
|
|
|
+ context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data)
|
|
|
|
+- except IOError as e: # Platform-specific: Python 2.7
|
|
|
|
++ except (IOError, OSError) as e:
|
|
|
|
+ raise SSLError(e)
|
|
|
|
+- # Py33 raises FileNotFoundError which subclasses OSError
|
|
|
|
+- # These are not equivalent unless we check the errno attribute
|
|
|
|
+- except OSError as e: # Platform-specific: Python 3.3 and beyond
|
|
|
|
+- if e.errno == errno.ENOENT:
|
|
|
|
+- raise SSLError(e)
|
|
|
|
+- raise
|
|
|
|
+
|
|
|
|
+ elif ssl_context is None and hasattr(context, "load_default_certs"):
|
|
|
|
+ # try to load OS default certs; works well on Windows (require Python3.4+)
|
|
|
|
+ context.load_default_certs()
|
|
|
|
+
|
|
|
|
+ # Attempt to detect if we get the goofy behavior of the
|
|
|
|
+ # keyfile being encrypted and OpenSSL asking for the
|
|
|
|
+ # passphrase via the terminal and instead error out.
|
|
|
|
+@@ -361,38 +385,49 @@ def ssl_wrap_socket(
|
|
|
|
+ raise SSLError("Client private key is encrypted, password is required")
|
|
|
|
+
|
|
|
|
+ if certfile:
|
|
|
|
+ if key_password is None:
|
|
|
|
+ context.load_cert_chain(certfile, keyfile)
|
|
|
|
+ else:
|
|
|
|
+ context.load_cert_chain(certfile, keyfile, key_password)
|
|
|
|
+
|
|
|
|
++ try:
|
|
|
|
++ if hasattr(context, "set_alpn_protocols"):
|
|
|
|
++ context.set_alpn_protocols(ALPN_PROTOCOLS)
|
|
|
|
++ except NotImplementedError:
|
|
|
|
++ pass
|
|
|
|
++
|
|
|
|
+ # If we detect server_hostname is an IP address then the SNI
|
|
|
|
+ # extension should not be used according to RFC3546 Section 3.1
|
|
|
|
+- # We shouldn't warn the user if SNI isn't available but we would
|
|
|
|
+- # not be using SNI anyways due to IP address for server_hostname.
|
|
|
|
+- if (
|
|
|
|
+- server_hostname is not None and not is_ipaddress(server_hostname)
|
|
|
|
+- ) or IS_SECURETRANSPORT:
|
|
|
|
+- if HAS_SNI and server_hostname is not None:
|
|
|
|
+- return context.wrap_socket(sock, server_hostname=server_hostname)
|
|
|
|
+-
|
|
|
|
++ use_sni_hostname = server_hostname and not is_ipaddress(server_hostname)
|
|
|
|
++ # SecureTransport uses server_hostname in certificate verification.
|
|
|
|
++ send_sni = (use_sni_hostname and HAS_SNI) or (
|
|
|
|
++ IS_SECURETRANSPORT and server_hostname
|
|
|
|
++ )
|
|
|
|
++ # Do not warn the user if server_hostname is an invalid SNI hostname.
|
|
|
|
++ if not HAS_SNI and use_sni_hostname:
|
|
|
|
+ warnings.warn(
|
|
|
|
+ "An HTTPS request has been made, but the SNI (Server Name "
|
|
|
|
+ "Indication) extension to TLS is not available on this platform. "
|
|
|
|
+ "This may cause the server to present an incorrect TLS "
|
|
|
|
+ "certificate, which can cause validation failures. You can upgrade to "
|
|
|
|
+ "a newer version of Python to solve this. For more information, see "
|
|
|
|
+ "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
|
|
|
|
+ "#ssl-warnings",
|
|
|
|
+ SNIMissingWarning,
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
+- return context.wrap_socket(sock)
|
|
|
|
++ if send_sni:
|
|
|
|
++ ssl_sock = _ssl_wrap_socket_impl(
|
|
|
|
++ sock, context, tls_in_tls, server_hostname=server_hostname
|
|
|
|
++ )
|
|
|
|
++ else:
|
|
|
|
++ ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls)
|
|
|
|
++ return ssl_sock
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ def is_ipaddress(hostname):
|
|
|
|
+ """Detects whether the hostname given is an IPv4 or IPv6 address.
|
|
|
|
+ Also detects IPv6 addresses with Zone IDs.
|
|
|
|
+
|
|
|
|
+ :param str hostname: Hostname to examine.
|
|
|
|
+ :return: True if the hostname is an IP address, False otherwise.
|
|
|
|
+@@ -407,8 +442,25 @@ def _is_key_file_encrypted(key_file):
|
|
|
|
+ """Detects if a key file is encrypted or not."""
|
|
|
|
+ with open(key_file, "r") as f:
|
|
|
|
+ for line in f:
|
|
|
|
+ # Look for Proc-Type: 4,ENCRYPTED
|
|
|
|
+ if "ENCRYPTED" in line:
|
|
|
|
+ return True
|
|
|
|
+
|
|
|
|
+ return False
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++def _ssl_wrap_socket_impl(sock, ssl_context, tls_in_tls, server_hostname=None):
|
|
|
|
++ if tls_in_tls:
|
|
|
|
++ if not SSLTransport:
|
|
|
|
++ # Import error, ssl is not available.
|
|
|
|
++ raise ProxySchemeUnsupported(
|
|
|
|
++ "TLS in TLS requires support for the 'ssl' module"
|
|
|
|
++ )
|
|
|
|
++
|
|
|
|
++ SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context)
|
|
|
|
++ return SSLTransport(sock, ssl_context, server_hostname)
|
|
|
|
++
|
|
|
|
++ if server_hostname:
|
|
|
|
++ return ssl_context.wrap_socket(sock, server_hostname=server_hostname)
|
|
|
|
++ else:
|
|
|
|
++ return ssl_context.wrap_socket(sock)
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/util/ssltransport.py b/third_party/python/urllib3/urllib3/util/ssltransport.py
|
|
|
|
+new file mode 100644
|
|
|
|
+--- /dev/null
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/util/ssltransport.py
|
|
|
|
+@@ -0,0 +1,221 @@
|
|
|
|
++import io
|
|
|
|
++import socket
|
|
|
|
++import ssl
|
|
|
|
++
|
|
|
|
++from urllib3.exceptions import ProxySchemeUnsupported
|
|
|
|
++from urllib3.packages import six
|
|
|
|
++
|
|
|
|
++SSL_BLOCKSIZE = 16384
|
|
|
|
++
|
|
|
|
++
|
|
|
|
++class SSLTransport:
|
|
|
|
++ """
|
|
|
|
++ The SSLTransport wraps an existing socket and establishes an SSL connection.
|
|
|
|
++
|
|
|
|
++ Contrary to Python's implementation of SSLSocket, it allows you to chain
|
|
|
|
++ multiple TLS connections together. It's particularly useful if you need to
|
|
|
|
++ implement TLS within TLS.
|
|
|
|
++
|
|
|
|
++ The class supports most of the socket API operations.
|
|
|
|
++ """
|
|
|
|
++
|
|
|
|
++ @staticmethod
|
|
|
|
++ def _validate_ssl_context_for_tls_in_tls(ssl_context):
|
|
|
|
++ """
|
|
|
|
++ Raises a ProxySchemeUnsupported if the provided ssl_context can't be used
|
|
|
|
++ for TLS in TLS.
|
|
|
|
++
|
|
|
|
++ The only requirement is that the ssl_context provides the 'wrap_bio'
|
|
|
|
++ methods.
|
|
|
|
++ """
|
|
|
|
++
|
|
|
|
++ if not hasattr(ssl_context, "wrap_bio"):
|
|
|
|
++ if six.PY2:
|
|
|
|
++ raise ProxySchemeUnsupported(
|
|
|
|
++ "TLS in TLS requires SSLContext.wrap_bio() which isn't "
|
|
|
|
++ "supported on Python 2"
|
|
|
|
++ )
|
|
|
|
++ else:
|
|
|
|
++ raise ProxySchemeUnsupported(
|
|
|
|
++ "TLS in TLS requires SSLContext.wrap_bio() which isn't "
|
|
|
|
++ "available on non-native SSLContext"
|
|
|
|
++ )
|
|
|
|
++
|
|
|
|
++ def __init__(
|
|
|
|
++ self, socket, ssl_context, server_hostname=None, suppress_ragged_eofs=True
|
|
|
|
++ ):
|
|
|
|
++ """
|
|
|
|
++ Create an SSLTransport around socket using the provided ssl_context.
|
|
|
|
++ """
|
|
|
|
++ self.incoming = ssl.MemoryBIO()
|
|
|
|
++ self.outgoing = ssl.MemoryBIO()
|
|
|
|
++
|
|
|
|
++ self.suppress_ragged_eofs = suppress_ragged_eofs
|
|
|
|
++ self.socket = socket
|
|
|
|
++
|
|
|
|
++ self.sslobj = ssl_context.wrap_bio(
|
|
|
|
++ self.incoming, self.outgoing, server_hostname=server_hostname
|
|
|
|
++ )
|
|
|
|
++
|
|
|
|
++ # Perform initial handshake.
|
|
|
|
++ self._ssl_io_loop(self.sslobj.do_handshake)
|
|
|
|
++
|
|
|
|
++ def __enter__(self):
|
|
|
|
++ return self
|
|
|
|
++
|
|
|
|
++ def __exit__(self, *_):
|
|
|
|
++ self.close()
|
|
|
|
++
|
|
|
|
++ def fileno(self):
|
|
|
|
++ return self.socket.fileno()
|
|
|
|
++
|
|
|
|
++ def read(self, len=1024, buffer=None):
|
|
|
|
++ return self._wrap_ssl_read(len, buffer)
|
|
|
|
++
|
|
|
|
++ def recv(self, len=1024, flags=0):
|
|
|
|
++ if flags != 0:
|
|
|
|
++ raise ValueError("non-zero flags not allowed in calls to recv")
|
|
|
|
++ return self._wrap_ssl_read(len)
|
|
|
|
++
|
|
|
|
++ def recv_into(self, buffer, nbytes=None, flags=0):
|
|
|
|
++ if flags != 0:
|
|
|
|
++ raise ValueError("non-zero flags not allowed in calls to recv_into")
|
|
|
|
++ if buffer and (nbytes is None):
|
|
|
|
++ nbytes = len(buffer)
|
|
|
|
++ elif nbytes is None:
|
|
|
|
++ nbytes = 1024
|
|
|
|
++ return self.read(nbytes, buffer)
|
|
|
|
++
|
|
|
|
++ def sendall(self, data, flags=0):
|
|
|
|
++ if flags != 0:
|
|
|
|
++ raise ValueError("non-zero flags not allowed in calls to sendall")
|
|
|
|
++ count = 0
|
|
|
|
++ with memoryview(data) as view, view.cast("B") as byte_view:
|
|
|
|
++ amount = len(byte_view)
|
|
|
|
++ while count < amount:
|
|
|
|
++ v = self.send(byte_view[count:])
|
|
|
|
++ count += v
|
|
|
|
++
|
|
|
|
++ def send(self, data, flags=0):
|
|
|
|
++ if flags != 0:
|
|
|
|
++ raise ValueError("non-zero flags not allowed in calls to send")
|
|
|
|
++ response = self._ssl_io_loop(self.sslobj.write, data)
|
|
|
|
++ return response
|
|
|
|
++
|
|
|
|
++ def makefile(
|
|
|
|
++ self, mode="r", buffering=None, encoding=None, errors=None, newline=None
|
|
|
|
++ ):
|
|
|
|
++ """
|
|
|
|
++ Python's httpclient uses makefile and buffered io when reading HTTP
|
|
|
|
++ messages and we need to support it.
|
|
|
|
++
|
|
|
|
++ This is unfortunately a copy and paste of socket.py makefile with small
|
|
|
|
++ changes to point to the socket directly.
|
|
|
|
++ """
|
|
|
|
++ if not set(mode) <= {"r", "w", "b"}:
|
|
|
|
++ raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))
|
|
|
|
++
|
|
|
|
++ writing = "w" in mode
|
|
|
|
++ reading = "r" in mode or not writing
|
|
|
|
++ assert reading or writing
|
|
|
|
++ binary = "b" in mode
|
|
|
|
++ rawmode = ""
|
|
|
|
++ if reading:
|
|
|
|
++ rawmode += "r"
|
|
|
|
++ if writing:
|
|
|
|
++ rawmode += "w"
|
|
|
|
++ raw = socket.SocketIO(self, rawmode)
|
|
|
|
++ self.socket._io_refs += 1
|
|
|
|
++ if buffering is None:
|
|
|
|
++ buffering = -1
|
|
|
|
++ if buffering < 0:
|
|
|
|
++ buffering = io.DEFAULT_BUFFER_SIZE
|
|
|
|
++ if buffering == 0:
|
|
|
|
++ if not binary:
|
|
|
|
++ raise ValueError("unbuffered streams must be binary")
|
|
|
|
++ return raw
|
|
|
|
++ if reading and writing:
|
|
|
|
++ buffer = io.BufferedRWPair(raw, raw, buffering)
|
|
|
|
++ elif reading:
|
|
|
|
++ buffer = io.BufferedReader(raw, buffering)
|
|
|
|
++ else:
|
|
|
|
++ assert writing
|
|
|
|
++ buffer = io.BufferedWriter(raw, buffering)
|
|
|
|
++ if binary:
|
|
|
|
++ return buffer
|
|
|
|
++ text = io.TextIOWrapper(buffer, encoding, errors, newline)
|
|
|
|
++ text.mode = mode
|
|
|
|
++ return text
|
|
|
|
++
|
|
|
|
++ def unwrap(self):
|
|
|
|
++ self._ssl_io_loop(self.sslobj.unwrap)
|
|
|
|
++
|
|
|
|
++ def close(self):
|
|
|
|
++ self.socket.close()
|
|
|
|
++
|
|
|
|
++ def getpeercert(self, binary_form=False):
|
|
|
|
++ return self.sslobj.getpeercert(binary_form)
|
|
|
|
++
|
|
|
|
++ def version(self):
|
|
|
|
++ return self.sslobj.version()
|
|
|
|
++
|
|
|
|
++ def cipher(self):
|
|
|
|
++ return self.sslobj.cipher()
|
|
|
|
++
|
|
|
|
++ def selected_alpn_protocol(self):
|
|
|
|
++ return self.sslobj.selected_alpn_protocol()
|
|
|
|
++
|
|
|
|
++ def selected_npn_protocol(self):
|
|
|
|
++ return self.sslobj.selected_npn_protocol()
|
|
|
|
++
|
|
|
|
++ def shared_ciphers(self):
|
|
|
|
++ return self.sslobj.shared_ciphers()
|
|
|
|
++
|
|
|
|
++ def compression(self):
|
|
|
|
++ return self.sslobj.compression()
|
|
|
|
++
|
|
|
|
++ def settimeout(self, value):
|
|
|
|
++ self.socket.settimeout(value)
|
|
|
|
++
|
|
|
|
++ def gettimeout(self):
|
|
|
|
++ return self.socket.gettimeout()
|
|
|
|
++
|
|
|
|
++ def _decref_socketios(self):
|
|
|
|
++ self.socket._decref_socketios()
|
|
|
|
++
|
|
|
|
++ def _wrap_ssl_read(self, len, buffer=None):
|
|
|
|
++ try:
|
|
|
|
++ return self._ssl_io_loop(self.sslobj.read, len, buffer)
|
|
|
|
++ except ssl.SSLError as e:
|
|
|
|
++ if e.errno == ssl.SSL_ERROR_EOF and self.suppress_ragged_eofs:
|
|
|
|
++ return 0 # eof, return 0.
|
|
|
|
++ else:
|
|
|
|
++ raise
|
|
|
|
++
|
|
|
|
++ def _ssl_io_loop(self, func, *args):
|
|
|
|
++ """ Performs an I/O loop between incoming/outgoing and the socket."""
|
|
|
|
++ should_loop = True
|
|
|
|
++ ret = None
|
|
|
|
++
|
|
|
|
++ while should_loop:
|
|
|
|
++ errno = None
|
|
|
|
++ try:
|
|
|
|
++ ret = func(*args)
|
|
|
|
++ except ssl.SSLError as e:
|
|
|
|
++ if e.errno not in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE):
|
|
|
|
++ # WANT_READ, and WANT_WRITE are expected, others are not.
|
|
|
|
++ raise e
|
|
|
|
++ errno = e.errno
|
|
|
|
++
|
|
|
|
++ buf = self.outgoing.read()
|
|
|
|
++ self.socket.sendall(buf)
|
|
|
|
++
|
|
|
|
++ if errno is None:
|
|
|
|
++ should_loop = False
|
|
|
|
++ elif errno == ssl.SSL_ERROR_WANT_READ:
|
|
|
|
++ buf = self.socket.recv(SSL_BLOCKSIZE)
|
|
|
|
++ if buf:
|
|
|
|
++ self.incoming.write(buf)
|
|
|
|
++ else:
|
|
|
|
++ self.incoming.write_eof()
|
|
|
|
++ return ret
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/util/timeout.py b/third_party/python/urllib3/urllib3/util/timeout.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/util/timeout.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/util/timeout.py
|
|
|
|
+@@ -1,74 +1,81 @@
|
|
|
|
+ from __future__ import absolute_import
|
|
|
|
+
|
|
|
|
++import time
|
|
|
|
++
|
|
|
|
+ # The default socket timeout, used by httplib to indicate that no timeout was
|
|
|
|
+ # specified by the user
|
|
|
|
+ from socket import _GLOBAL_DEFAULT_TIMEOUT
|
|
|
|
+-import time
|
|
|
|
+
|
|
|
|
+ from ..exceptions import TimeoutStateError
|
|
|
|
+
|
|
|
|
+ # A sentinel value to indicate that no timeout was specified by the user in
|
|
|
|
+ # urllib3
|
|
|
|
+ _Default = object()
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ # Use time.monotonic if available.
|
|
|
|
+ current_time = getattr(time, "monotonic", time.time)
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ class Timeout(object):
|
|
|
|
+- """ Timeout configuration.
|
|
|
|
++ """Timeout configuration.
|
|
|
|
+
|
|
|
|
+- Timeouts can be defined as a default for a pool::
|
|
|
|
++ Timeouts can be defined as a default for a pool:
|
|
|
|
+
|
|
|
|
+- timeout = Timeout(connect=2.0, read=7.0)
|
|
|
|
+- http = PoolManager(timeout=timeout)
|
|
|
|
+- response = http.request('GET', 'http://example.com/')
|
|
|
|
++ .. code-block:: python
|
|
|
|
++
|
|
|
|
++ timeout = Timeout(connect=2.0, read=7.0)
|
|
|
|
++ http = PoolManager(timeout=timeout)
|
|
|
|
++ response = http.request('GET', 'http://example.com/')
|
|
|
|
+
|
|
|
|
+- Or per-request (which overrides the default for the pool)::
|
|
|
|
++ Or per-request (which overrides the default for the pool):
|
|
|
|
+
|
|
|
|
+- response = http.request('GET', 'http://example.com/', timeout=Timeout(10))
|
|
|
|
++ .. code-block:: python
|
|
|
|
++
|
|
|
|
++ response = http.request('GET', 'http://example.com/', timeout=Timeout(10))
|
|
|
|
+
|
|
|
|
+- Timeouts can be disabled by setting all the parameters to ``None``::
|
|
|
|
++ Timeouts can be disabled by setting all the parameters to ``None``:
|
|
|
|
+
|
|
|
|
+- no_timeout = Timeout(connect=None, read=None)
|
|
|
|
+- response = http.request('GET', 'http://example.com/, timeout=no_timeout)
|
|
|
|
++ .. code-block:: python
|
|
|
|
++
|
|
|
|
++ no_timeout = Timeout(connect=None, read=None)
|
|
|
|
++ response = http.request('GET', 'http://example.com/, timeout=no_timeout)
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ :param total:
|
|
|
|
+ This combines the connect and read timeouts into one; the read timeout
|
|
|
|
+ will be set to the time leftover from the connect attempt. In the
|
|
|
|
+ event that both a connect timeout and a total are specified, or a read
|
|
|
|
+ timeout and a total are specified, the shorter timeout will be applied.
|
|
|
|
+
|
|
|
|
+ Defaults to None.
|
|
|
|
+
|
|
|
|
+- :type total: integer, float, or None
|
|
|
|
++ :type total: int, float, or None
|
|
|
|
+
|
|
|
|
+ :param connect:
|
|
|
|
+ The maximum amount of time (in seconds) to wait for a connection
|
|
|
|
+ attempt to a server to succeed. Omitting the parameter will default the
|
|
|
|
+ connect timeout to the system default, probably `the global default
|
|
|
|
+ timeout in socket.py
|
|
|
|
+ <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
|
|
|
|
+ None will set an infinite timeout for connection attempts.
|
|
|
|
+
|
|
|
|
+- :type connect: integer, float, or None
|
|
|
|
++ :type connect: int, float, or None
|
|
|
|
+
|
|
|
|
+ :param read:
|
|
|
|
+ The maximum amount of time (in seconds) to wait between consecutive
|
|
|
|
+ read operations for a response from the server. Omitting the parameter
|
|
|
|
+ will default the read timeout to the system default, probably `the
|
|
|
|
+ global default timeout in socket.py
|
|
|
|
+ <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
|
|
|
|
+ None will set an infinite timeout.
|
|
|
|
+
|
|
|
|
+- :type read: integer, float, or None
|
|
|
|
++ :type read: int, float, or None
|
|
|
|
+
|
|
|
|
+ .. note::
|
|
|
|
+
|
|
|
|
+ Many factors can affect the total amount of time for urllib3 to return
|
|
|
|
+ an HTTP response.
|
|
|
|
+
|
|
|
|
+ For example, Python's DNS resolver does not obey the timeout specified
|
|
|
|
+ on the socket. Other factors that can affect total request time include
|
|
|
|
+@@ -106,17 +113,17 @@ class Timeout(object):
|
|
|
|
+ self.total,
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
+ # __str__ provided for backwards compatibility
|
|
|
|
+ __str__ = __repr__
|
|
|
|
+
|
|
|
|
+ @classmethod
|
|
|
|
+ def _validate_timeout(cls, value, name):
|
|
|
|
+- """ Check that a timeout attribute is valid.
|
|
|
|
++ """Check that a timeout attribute is valid.
|
|
|
|
+
|
|
|
|
+ :param value: The timeout value to validate
|
|
|
|
+ :param name: The name of the timeout attribute to validate. This is
|
|
|
|
+ used to specify in error messages.
|
|
|
|
+ :return: The validated and casted version of the given value.
|
|
|
|
+ :raises ValueError: If it is a numeric value less than or equal to
|
|
|
|
+ zero, or the type is not an integer, float, or None.
|
|
|
|
+ """
|
|
|
|
+@@ -152,72 +159,72 @@ class Timeout(object):
|
|
|
|
+ "Timeout value %s was %s, but it must be an "
|
|
|
|
+ "int, float or None." % (name, value)
|
|
|
|
+ )
|
|
|
|
+
|
|
|
|
+ return value
|
|
|
|
+
|
|
|
|
+ @classmethod
|
|
|
|
+ def from_float(cls, timeout):
|
|
|
|
+- """ Create a new Timeout from a legacy timeout value.
|
|
|
|
++ """Create a new Timeout from a legacy timeout value.
|
|
|
|
+
|
|
|
|
+ The timeout value used by httplib.py sets the same timeout on the
|
|
|
|
+ connect(), and recv() socket requests. This creates a :class:`Timeout`
|
|
|
|
+ object that sets the individual timeouts to the ``timeout`` value
|
|
|
|
+ passed to this function.
|
|
|
|
+
|
|
|
|
+ :param timeout: The legacy timeout value.
|
|
|
|
+ :type timeout: integer, float, sentinel default object, or None
|
|
|
|
+ :return: Timeout object
|
|
|
|
+ :rtype: :class:`Timeout`
|
|
|
|
+ """
|
|
|
|
+ return Timeout(read=timeout, connect=timeout)
|
|
|
|
+
|
|
|
|
+ def clone(self):
|
|
|
|
+- """ Create a copy of the timeout object
|
|
|
|
++ """Create a copy of the timeout object
|
|
|
|
+
|
|
|
|
+ Timeout properties are stored per-pool but each request needs a fresh
|
|
|
|
+ Timeout object to ensure each one has its own start/stop configured.
|
|
|
|
+
|
|
|
|
+ :return: a copy of the timeout object
|
|
|
|
+ :rtype: :class:`Timeout`
|
|
|
|
+ """
|
|
|
|
+ # We can't use copy.deepcopy because that will also create a new object
|
|
|
|
+ # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
|
|
|
|
+ # detect the user default.
|
|
|
|
+ return Timeout(connect=self._connect, read=self._read, total=self.total)
|
|
|
|
+
|
|
|
|
+ def start_connect(self):
|
|
|
|
+- """ Start the timeout clock, used during a connect() attempt
|
|
|
|
++ """Start the timeout clock, used during a connect() attempt
|
|
|
|
+
|
|
|
|
+ :raises urllib3.exceptions.TimeoutStateError: if you attempt
|
|
|
|
+ to start a timer that has been started already.
|
|
|
|
+ """
|
|
|
|
+ if self._start_connect is not None:
|
|
|
|
+ raise TimeoutStateError("Timeout timer has already been started.")
|
|
|
|
+ self._start_connect = current_time()
|
|
|
|
+ return self._start_connect
|
|
|
|
+
|
|
|
|
+ def get_connect_duration(self):
|
|
|
|
+- """ Gets the time elapsed since the call to :meth:`start_connect`.
|
|
|
|
++ """Gets the time elapsed since the call to :meth:`start_connect`.
|
|
|
|
+
|
|
|
|
+ :return: Elapsed time in seconds.
|
|
|
|
+ :rtype: float
|
|
|
|
+ :raises urllib3.exceptions.TimeoutStateError: if you attempt
|
|
|
|
+ to get duration for a timer that hasn't been started.
|
|
|
|
+ """
|
|
|
|
+ if self._start_connect is None:
|
|
|
|
+ raise TimeoutStateError(
|
|
|
|
+ "Can't get connect duration for timer that has not started."
|
|
|
|
+ )
|
|
|
|
+ return current_time() - self._start_connect
|
|
|
|
+
|
|
|
|
+ @property
|
|
|
|
+ def connect_timeout(self):
|
|
|
|
+- """ Get the value to use when setting a connection timeout.
|
|
|
|
++ """Get the value to use when setting a connection timeout.
|
|
|
|
+
|
|
|
|
+ This will be a positive float or integer, the value None
|
|
|
|
+ (never timeout), or the default system timeout.
|
|
|
|
+
|
|
|
|
+ :return: Connect timeout.
|
|
|
|
+ :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
|
|
|
|
+ """
|
|
|
|
+ if self.total is None:
|
|
|
|
+@@ -225,17 +232,17 @@ class Timeout(object):
|
|
|
|
+
|
|
|
|
+ if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
|
|
|
|
+ return self.total
|
|
|
|
+
|
|
|
|
+ return min(self._connect, self.total)
|
|
|
|
+
|
|
|
|
+ @property
|
|
|
|
+ def read_timeout(self):
|
|
|
|
+- """ Get the value for the read timeout.
|
|
|
|
++ """Get the value for the read timeout.
|
|
|
|
+
|
|
|
|
+ This assumes some time has elapsed in the connection timeout and
|
|
|
|
+ computes the read timeout appropriately.
|
|
|
|
+
|
|
|
|
+ If self.total is set, the read timeout is dependent on the amount of
|
|
|
|
+ time taken by the connect timeout. If the connection time has not been
|
|
|
|
+ established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
|
|
|
|
+ raised.
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/util/url.py b/third_party/python/urllib3/urllib3/util/url.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/util/url.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/util/url.py
|
|
|
|
+@@ -1,16 +1,16 @@
|
|
|
|
+ from __future__ import absolute_import
|
|
|
|
++
|
|
|
|
+ import re
|
|
|
|
+ from collections import namedtuple
|
|
|
|
+
|
|
|
|
+ from ..exceptions import LocationParseError
|
|
|
|
+ from ..packages import six
|
|
|
|
+
|
|
|
|
+-
|
|
|
|
+ url_attrs = ["scheme", "auth", "host", "port", "path", "query", "fragment"]
|
|
|
|
+
|
|
|
|
+ # We only want to normalize urls with an HTTP(S) scheme.
|
|
|
|
+ # urllib3 infers URLs without a scheme (None) to be http.
|
|
|
|
+ NORMALIZABLE_SCHEMES = ("http", "https", None)
|
|
|
|
+
|
|
|
|
+ # Almost all of these patterns were derived from the
|
|
|
|
+ # 'rfc3986' module: https://github.com/python-hyper/rfc3986
|
|
|
|
+diff --git a/third_party/python/urllib3/urllib3/util/wait.py b/third_party/python/urllib3/urllib3/util/wait.py
|
|
|
|
+--- a/third_party/python/urllib3/urllib3/util/wait.py
|
|
|
|
++++ b/third_party/python/urllib3/urllib3/util/wait.py
|
|
|
|
+@@ -1,12 +1,12 @@
|
|
|
|
+ import errno
|
|
|
|
+-from functools import partial
|
|
|
|
+ import select
|
|
|
|
+ import sys
|
|
|
|
++from functools import partial
|
|
|
|
+
|
|
|
|
+ try:
|
|
|
|
+ from time import monotonic
|
|
|
|
+ except ImportError:
|
|
|
|
+ from time import time as monotonic
|
|
|
|
+
|
|
|
|
+ __all__ = ["NoWayToWaitForSocketError", "wait_for_read", "wait_for_write"]
|
|
|
|
+
|
|
|
|
+@@ -135,19 +135,19 @@ def wait_for_socket(*args, **kwargs):
|
|
|
|
+ elif hasattr(select, "select"):
|
|
|
|
+ wait_for_socket = select_wait_for_socket
|
|
|
|
+ else: # Platform-specific: Appengine.
|
|
|
|
+ wait_for_socket = null_wait_for_socket
|
|
|
|
+ return wait_for_socket(*args, **kwargs)
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ def wait_for_read(sock, timeout=None):
|
|
|
|
+- """ Waits for reading to be available on a given socket.
|
|
|
|
++ """Waits for reading to be available on a given socket.
|
|
|
|
+ Returns True if the socket is readable, or False if the timeout expired.
|
|
|
|
+ """
|
|
|
|
+ return wait_for_socket(sock, read=True, timeout=timeout)
|
|
|
|
+
|
|
|
|
+
|
|
|
|
+ def wait_for_write(sock, timeout=None):
|
|
|
|
+- """ Waits for writing to be available on a given socket.
|
|
|
|
++ """Waits for writing to be available on a given socket.
|
|
|
|
+ Returns True if the socket is readable, or False if the timeout expired.
|
|
|
|
+ """
|
|
|
|
+ return wait_for_socket(sock, write=True, timeout=timeout)
|
|
|
|
+diff --git a/tools/moztreedocs/requirements.in.1715900.later b/tools/moztreedocs/requirements.in.1715900.later
|
|
|
|
+new file mode 100644
|
|
|
|
+--- /dev/null
|
|
|
|
++++ b/tools/moztreedocs/requirements.in.1715900.later
|
|
|
|
+@@ -0,0 +1,19 @@
|
|
|
|
++--- requirements.in
|
|
|
|
+++++ requirements.in
|
|
|
|
++@@ -1,14 +1,14 @@
|
|
|
|
++ # pip freeze > requirements.in
|
|
|
|
++ alabaster==0.7.12
|
|
|
|
++ Babel==2.8.0
|
|
|
|
++ backports-abc==0.5
|
|
|
|
++-boto3==1.15.6
|
|
|
|
++-botocore==1.18.6
|
|
|
|
+++boto3==1.16.63
|
|
|
|
+++botocore==1.19.63
|
|
|
|
++ certifi==2020.6.20
|
|
|
|
++ chardet==3.0.4
|
|
|
|
++ colorama==0.4.4
|
|
|
|
++ commonmark==0.9.1
|
|
|
|
++ docutils==0.16
|
|
|
|
++ fluent.pygments==1.0
|
|
|
|
++ fluent.syntax==0.18.1
|
|
|
|
++ idna==2.10
|
|
|
|
+diff --git a/tools/moztreedocs/requirements.txt.1715900.later b/tools/moztreedocs/requirements.txt.1715900.later
|
|
|
|
+new file mode 100644
|
|
|
|
+--- /dev/null
|
|
|
|
++++ b/tools/moztreedocs/requirements.txt.1715900.later
|
|
|
|
+@@ -0,0 +1,32 @@
|
|
|
|
++--- requirements.txt
|
|
|
|
+++++ requirements.txt
|
|
|
|
++@@ -21,23 +21,23 @@ babel==2.8.0 \
|
|
|
|
++ --hash=sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4
|
|
|
|
++ # via
|
|
|
|
++ # -r requirements.in
|
|
|
|
++ # sphinx
|
|
|
|
++ backports-abc==0.5 \
|
|
|
|
++ --hash=sha256:033be54514a03e255df75c5aee8f9e672f663f93abb723444caec8fe43437bde \
|
|
|
|
++ --hash=sha256:52089f97fe7a9aa0d3277b220c1d730a85aefd64e1b2664696fe35317c5470a7
|
|
|
|
++ # via -r requirements.in
|
|
|
|
++-boto3==1.15.6 \
|
|
|
|
++- --hash=sha256:87534080a5addad135fcd631fa8b57a12e1a234c23d86521e84fbbd9217fd6a5 \
|
|
|
|
++- --hash=sha256:c4c84c6647e84a9f270d86da7eea1a250c2529e26ddb39320546f235327f10e6
|
|
|
|
+++boto3==1.16.63 \
|
|
|
|
+++ --hash=sha256:1c0003609e63e8cff51dee7a49e904bcdb20e140b5f7a10a03006289fd8c8dc1 \
|
|
|
|
+++ --hash=sha256:c919dac9773115025e1e2a7e462f60ca082e322bb6f4354247523e4226133b0b
|
|
|
|
++ # via -r requirements.in
|
|
|
|
++-botocore==1.18.6 \
|
|
|
|
++- --hash=sha256:31f04b68a6ebe8cfa97b4d70f54f29aef8b6a0bc9c4da7b8ee9b6a53fc69edae \
|
|
|
|
++- --hash=sha256:3de32a03679bb172a41c38e3c9af3f7259f3637f705aa2ac384b3233dc985b85
|
|
|
|
+++botocore==1.19.63 \
|
|
|
|
+++ --hash=sha256:ad4adfcc195b5401d84b0c65d3a89e507c1d54c201879c8761ff10ef5c361e21 \
|
|
|
|
+++ --hash=sha256:d3694f6ef918def8082513e5ef309cd6cd83b612e9984e3a66e8adc98c650a92
|
|
|
|
++ # via
|
|
|
|
++ # -r requirements.in
|
|
|
|
++ # boto3
|
|
|
|
++ # s3transfer
|
|
|
|
++ certifi==2020.6.20 \
|
|
|
|
++ --hash=sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3 \
|
|
|
|
++ --hash=sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41
|
|
|
|
++ # via
|