Browse Source

build stuff

Frank-Rainer Grahl 4 weeks ago
parent
commit
9d79ab3b9a

+ 126 - 0
mozilla-release/patches/1551618-68a1.patch

@@ -0,0 +1,126 @@
+# HG changeset patch
+# User Emilio Cobos Alvarez <emilio@crisal.io>
+# Date 1557925912 0
+# Node ID 33b8297c53bcc11843f0992e0e310c8bf9379c57
+# Parent  24a2db112eb829634b49dd178fe43a04770fffae
+Bug 1551618 - Check for libclang >= 4.0 in configure. r=froydnj
+
+This is better than failing with obscure rust errors later on.
+
+Differential Revision: https://phabricator.services.mozilla.com/D31102
+
+diff --git a/build/moz.configure/bindgen.configure b/build/moz.configure/bindgen.configure
+--- a/build/moz.configure/bindgen.configure
++++ b/build/moz.configure/bindgen.configure
+@@ -162,19 +162,19 @@ def bindgen_config_paths(llvm_config, li
+ 
+         if host.os == 'OpenBSD':
+             libclang_choices = glob.glob(path + '/libclang.so.*.*')
+ 
+         # At least one of the choices must be found.
+         for choice in libclang_choices:
+             libclang = os.path.join(path, choice)
+             if os.path.exists(libclang):
+-                return (True, None)
++                return (libclang, None)
+         else:
+-            return (False, list(set(libclang_choices)))
++            return (None, list(set(libclang_choices)))
+ 
+     # XXX: we want this code to be run for both Gecko and JS, but we don't
+     # necessarily want to force a bindgen/Rust dependency on JS just yet.
+     # Actually, we don't want to force an error if we're not building the
+     # browser generally.  We therefore whitelist the projects that require
+     # bindgen facilities at this point and leave it at that.
+     bindgen_projects = ('browser', 'mobile/android')
+ 
+@@ -214,26 +214,27 @@ def bindgen_config_paths(llvm_config, li
+ 
+         if not os.path.exists(libclang_path):
+             die(dedent('''\
+             The directory {} returned by `llvm-config {}` does not exist.
+             clang is required to build Firefox.  Please install the
+             necessary packages, or run `mach bootstrap`.
+             '''.format(libclang_path, libclang_arg)))
+ 
+-        (found, searched) = search_for_libclang(libclang_path)
+-        if not found:
++        (libclang, searched) = search_for_libclang(libclang_path)
++        if not libclang:
+             die(dedent('''\
+             Could not find the clang shared library in the path {}
+             returned by `llvm-config {}` (searched for files {}).
+             clang is required to build Firefox.  Please install the
+             necessary packages, or run `mach bootstrap`.
+             '''.format(libclang_path, libclang_arg, searched)))
+ 
+         return namespace(
++            libclang=libclang,
+             libclang_path=libclang_path,
+             clang_path=clang_resolved,
+         )
+ 
+     if (not libclang_path and clang_path) or \
+        (libclang_path and not clang_path):
+         if build_project not in bindgen_projects:
+             return namespace()
+@@ -246,34 +247,56 @@ def bindgen_config_paths(llvm_config, li
+     clang_path = clang_path[0]
+ 
+     if not os.path.exists(libclang_path) or \
+        not os.path.isdir(libclang_path):
+         die(dedent('''\
+         The argument to --with-libclang-path is not a directory: {}
+         '''.format(libclang_path)))
+ 
+-    (found, searched) = search_for_libclang(libclang_path)
+-    if not found:
++    (libclang, searched) = search_for_libclang(libclang_path)
++    if not libclang:
+         die(dedent('''\
+         Could not find the clang shared library in the path {}
+         specified by --with-libclang-path (searched for files {}).
+         '''.format(libclang_path, searched)))
+ 
+     clang_resolved = find_program(clang_path)
+     if not clang_resolved:
+         die(dedent('''\
+         The argument to --with-clang-path is not a file: {}
+         '''.format(clang_path)))
+ 
+     return namespace(
++        libclang=libclang,
+         libclang_path=libclang_path,
+         clang_path=clang_resolved,
+     )
+ 
++@depends(bindgen_config_paths.libclang)
++@checking('that libclang is new enough', lambda s: 'yes' if s else 'no')
++@imports(_from='ctypes', _import='CDLL')
++@imports(_from='textwrap', _import='dedent')
++def min_libclang_version(libclang):
++    try:
++        lib = CDLL(libclang)
++        # We want at least 4.0. The API we test below is enough for that.
++        # Just accessing it should throw if not found.
++        fun = lib.clang_EvalResult_getAsLongLong
++        return True
++    except:
++        die(dedent('''\
++        The libclang located at {} is too old (need at least 4.0).
++
++        Please make sure to update it or point to a newer libclang using
++        --with-libclang-path.
++        '''.format(libclang)))
++        return False
++
++
+ set_config('MOZ_LIBCLANG_PATH', bindgen_config_paths.libclang_path)
+ set_config('MOZ_CLANG_PATH', bindgen_config_paths.clang_path)
+ 
+ 
+ @depends(target, target_is_unix, cxx_compiler, bindgen_cflags_android,
+          bindgen_config_paths.clang_path, macos_sdk)
+ def basic_bindgen_cflags(target, is_unix, compiler_info, android_cflags,
+                          clang_path, macos_sdk):
+

+ 34 - 0
mozilla-release/patches/1552476-68a1.patch

@@ -0,0 +1,34 @@
+# HG changeset patch
+# User Emilio Cobos Alvarez <emilio@crisal.io>
+# Date 1558120615 0
+# Node ID a18f289abc5d8034c9cdf85992a646fff47966ed
+# Parent  f99a22d1e5a9c4723f558fb2dda3917e742ed5e3
+Bug 1552476 - Ensure we pass a string to CDLL on Windows. r=froydnj
+
+Apparently it doesn't deal very well with unicode objects.
+
+Differential Revision: https://phabricator.services.mozilla.com/D31619
+
+diff --git a/build/moz.configure/bindgen.configure b/build/moz.configure/bindgen.configure
+--- a/build/moz.configure/bindgen.configure
++++ b/build/moz.configure/bindgen.configure
+@@ -272,17 +272,17 @@ def bindgen_config_paths(llvm_config, li
+     )
+ 
+ @depends(bindgen_config_paths.libclang)
+ @checking('that libclang is new enough', lambda s: 'yes' if s else 'no')
+ @imports(_from='ctypes', _import='CDLL')
+ @imports(_from='textwrap', _import='dedent')
+ def min_libclang_version(libclang):
+     try:
+-        lib = CDLL(libclang)
++        lib = CDLL(libclang.encode('utf-8'))
+         # We want at least 4.0. The API we test below is enough for that.
+         # Just accessing it should throw if not found.
+         fun = lib.clang_EvalResult_getAsLongLong
+         return True
+     except:
+         die(dedent('''\
+         The libclang located at {} is too old (need at least 4.0).
+ 
+

+ 24 - 28
mozilla-release/patches/1627163-16-77a1.patch

@@ -2,7 +2,7 @@
 # User Mike Hommey <mh+mozilla@glandium.org>
 # User Mike Hommey <mh+mozilla@glandium.org>
 # Date 1586284316 0
 # Date 1586284316 0
 # Node ID 407894bc5f9c2e2dc6ed89a188a09b1f9a242a9d
 # Node ID 407894bc5f9c2e2dc6ed89a188a09b1f9a242a9d
-# Parent  39068e161c7130c1b7f404e39205502eda8935f0
+# Parent  49552ad2329fad35e70c807ad0859ffb8dcc55c7
 Bug 1627163 - Switch python configure to python 3. r=firefox-build-system-reviewers,rstewart
 Bug 1627163 - Switch python configure to python 3. r=firefox-build-system-reviewers,rstewart
 
 
 This also does a few remaining python 2 incompatible changes to
 This also does a few remaining python 2 incompatible changes to
@@ -10,32 +10,28 @@ This also does a few remaining python 2 incompatible changes to
 
 
 Differential Revision: https://phabricator.services.mozilla.com/D69538
 Differential Revision: https://phabricator.services.mozilla.com/D69538
 
 
-diff --git a/build/moz.configure/bindgen.configure.1627163.later b/build/moz.configure/bindgen.configure.1627163.later
-new file mode 100644
---- /dev/null
-+++ b/build/moz.configure/bindgen.configure.1627163.later
-@@ -0,0 +1,21 @@
-+--- bindgen.configure
-++++ bindgen.configure
-+@@ -219,17 +219,17 @@ def bindgen_config_paths(clang, libclang
-+ 
-+ 
-+ @depends(bindgen_config_paths.libclang, when=bindgen_config_paths)
-+ @checking('that libclang is new enough', lambda s: 'yes' if s else 'no')
-+ @imports(_from='ctypes', _import='CDLL')
-+ @imports(_from='textwrap', _import='dedent')
-+ def min_libclang_version(libclang):
-+     try:
-+-        lib = CDLL(libclang.encode('utf-8'))
-++        lib = CDLL(libclang)
-+         # We want at least 4.0. The API we test below is enough for that.
-+         # Just accessing it should throw if not found.
-+         fun = lib.clang_EvalResult_getAsLongLong
-+         return True
-+     except:
-+         die(dedent('''\
-+         The libclang located at {} is too old (need at least 4.0).
-+ 
+diff --git a/build/moz.configure/bindgen.configure b/build/moz.configure/bindgen.configure
+--- a/build/moz.configure/bindgen.configure
++++ b/build/moz.configure/bindgen.configure
+@@ -272,17 +272,17 @@ def bindgen_config_paths(llvm_config, li
+     )
+ 
+ @depends(bindgen_config_paths.libclang)
+ @checking('that libclang is new enough', lambda s: 'yes' if s else 'no')
+ @imports(_from='ctypes', _import='CDLL')
+ @imports(_from='textwrap', _import='dedent')
+ def min_libclang_version(libclang):
+     try:
+-        lib = CDLL(libclang.encode('utf-8'))
++        lib = CDLL(libclang)
+         # We want at least 4.0. The API we test below is enough for that.
+         # Just accessing it should throw if not found.
+         fun = lib.clang_EvalResult_getAsLongLong
+         return True
+     except:
+         die(dedent('''\
+         The libclang located at {} is too old (need at least 4.0).
+ 
 diff --git a/build/moz.configure/init.configure b/build/moz.configure/init.configure
 diff --git a/build/moz.configure/init.configure b/build/moz.configure/init.configure
 --- a/build/moz.configure/init.configure
 --- a/build/moz.configure/init.configure
 +++ b/build/moz.configure/init.configure
 +++ b/build/moz.configure/init.configure
@@ -461,7 +457,7 @@ diff --git a/build/moz.configure/node.configure b/build/moz.configure/node.confi
 diff --git a/build/moz.configure/util.configure b/build/moz.configure/util.configure
 diff --git a/build/moz.configure/util.configure b/build/moz.configure/util.configure
 --- a/build/moz.configure/util.configure
 --- a/build/moz.configure/util.configure
 +++ b/build/moz.configure/util.configure
 +++ b/build/moz.configure/util.configure
-@@ -283,17 +283,17 @@ def unique_list(l):
+@@ -277,17 +277,17 @@ def unique_list(l):
  #      r'C:\Program Files (x86)\Windows Kits\10\')
  #      r'C:\Program Files (x86)\Windows Kits\10\')
  #
  #
  #   get_registry_values(r'HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\'
  #   get_registry_values(r'HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\'

+ 42 - 0
mozilla-release/patches/1641806-79a1.patch

@@ -0,0 +1,42 @@
+# HG changeset patch
+# User Mitchell Hentges <mhentges@mozilla.com>
+# Date 1591301720 0
+# Node ID bddb6d72204c550084fb318695f9db740379d419
+# Parent  324abdf9aff66d28102917838b64bc2fb50dd9fb
+Bug 1641806: configure should look for libclang >= 5.0 r=rstewart
+
+clang_getAddressSpace was found as a clang 5.0+ API by comparing the "tools/libclang/libclang.exports" files in the clang source code.
+"clang_getAddressSpace" exists for 5.0.0 and 10.0.0, but not for 4.0.1.
+
+Differential Revision: https://phabricator.services.mozilla.com/D78374
+
+diff --git a/build/moz.configure/bindgen.configure b/build/moz.configure/bindgen.configure
+--- a/build/moz.configure/bindgen.configure
++++ b/build/moz.configure/bindgen.configure
+@@ -273,23 +273,23 @@ def bindgen_config_paths(llvm_config, li
+ 
+ @depends(bindgen_config_paths.libclang)
+ @checking('that libclang is new enough', lambda s: 'yes' if s else 'no')
+ @imports(_from='ctypes', _import='CDLL')
+ @imports(_from='textwrap', _import='dedent')
+ def min_libclang_version(libclang):
+     try:
+         lib = CDLL(libclang)
+-        # We want at least 4.0. The API we test below is enough for that.
++        # We want at least 5.0. The API we test below is enough for that.
+         # Just accessing it should throw if not found.
+-        fun = lib.clang_EvalResult_getAsLongLong
++        fun = lib.clang_getAddressSpace
+         return True
+     except:
+         die(dedent('''\
+-        The libclang located at {} is too old (need at least 4.0).
++        The libclang located at {} is too old (need at least 5.0).
+ 
+         Please make sure to update it or point to a newer libclang using
+         --with-libclang-path.
+         '''.format(libclang)))
+         return False
+ 
+ 
+ set_config('MOZ_LIBCLANG_PATH', bindgen_config_paths.libclang_path)

+ 7586 - 0
mozilla-release/patches/1715900-99a1.patch

@@ -0,0 +1,7586 @@
+# HG changeset patch
+# User Thomas Wisniewski <twisniewski@mozilla.com>
+# Date 1644937638 0
+# Node ID 8db101a9793765d705c30577d95ea6b6ad99b28c
+# Parent  39b976c7b734a6a6706cb686b2faaffb2e35c7d8
+Bug 1715900 - Bump urllib3 to version 1.26.0, boto3 to 1.16.63, and botocore to 1.19.63; r=mhentges
+
+Differential Revision: https://phabricator.services.mozilla.com/D138383
+
+diff --git a/third_party/python/requirements.in b/third_party/python/requirements.in
+--- a/third_party/python/requirements.in
++++ b/third_party/python/requirements.in
+@@ -35,10 +35,11 @@ pyasn1==0.4.8
+ pytest==3.6.2
+ python-hglib==2.4
+ pytoml==0.1.10
+ pyyaml==5.4.1
+ redo==2.0.3
+ requests==2.25.1
+ responses==0.10.6
+ six==1.13.0
++urllib3==1.26
+ voluptuous==0.11.5
+ yamllint==1.23
+diff --git a/third_party/python/requirements.txt b/third_party/python/requirements.txt
+--- a/third_party/python/requirements.txt
++++ b/third_party/python/requirements.txt
+@@ -197,20 +197,22 @@ six==1.13.0 \
+     #   -r requirements-mach-vendor-python.in
+     #   blessings
+     #   compare-locales
+     #   ecdsa
+     #   fluent.migrate
+     #   more-itertools
+     #   pytest
+     #   responses
+-urllib3==1.25.9 \
+-    --hash=sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527 \
+-    --hash=sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115
+-    # via requests
++urllib3==1.26.0 \
++    --hash=sha256:4849f132941d68144df0a3785ccc4fe423430ba5db0108d045c8cadbc90f517a \
++    --hash=sha256:bad31cb622ceee0ab46c4c884cf61957def0ff2e644de0a7a093678844c9ccac
++    # via
++    #   -r requirements-mach-vendor-python.in
++    #   requests
+ voluptuous==0.11.5 \
+     --hash=sha256:303542b3fc07fb52ec3d7a1c614b329cdbee13a9d681935353d8ea56a7bfa9f1 \
+     --hash=sha256:567a56286ef82a9d7ae0628c5842f65f516abcb496e74f3f59f1d7b28df314ef
+     # via -r requirements-mach-vendor-python.in
+ yamllint==1.23 \
+     --hash=sha256:0fa69bf8a86182b7fe14918bdd3a30354c869966bbc7cbfff176af71bda9c806 \
+     --hash=sha256:59f3ff77f44e7f46be6aecdb985830f73a1c51e290b7082a7d38c2ae1940f4a9
+     # via -r requirements-mach-vendor-python.in
+diff --git a/third_party/python/urllib3/urllib3-1.25.9.dist-info/LICENSE.txt b/third_party/python/urllib3/urllib3-1.25.9.dist-info/LICENSE.txt
+deleted file mode 100644
+--- a/third_party/python/urllib3/urllib3-1.25.9.dist-info/LICENSE.txt
++++ /dev/null
+@@ -1,21 +0,0 @@
+-MIT License
+-
+-Copyright (c) 2008-2019 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+-
+-Permission is hereby granted, free of charge, to any person obtaining a copy
+-of this software and associated documentation files (the "Software"), to deal
+-in the Software without restriction, including without limitation the rights
+-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+-copies of the Software, and to permit persons to whom the Software is
+-furnished to do so, subject to the following conditions:
+-
+-The above copyright notice and this permission notice shall be included in all
+-copies or substantial portions of the Software.
+-
+-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+-SOFTWARE.
+diff --git a/third_party/python/urllib3/urllib3-1.25.9.dist-info/METADATA b/third_party/python/urllib3/urllib3-1.25.9.dist-info/METADATA
+deleted file mode 100644
+--- a/third_party/python/urllib3/urllib3-1.25.9.dist-info/METADATA
++++ /dev/null
+@@ -1,1262 +0,0 @@
+-Metadata-Version: 2.1
+-Name: urllib3
+-Version: 1.25.9
+-Summary: HTTP library with thread-safe connection pooling, file post, and more.
+-Home-page: https://urllib3.readthedocs.io/
+-Author: Andrey Petrov
+-Author-email: andrey.petrov@shazow.net
+-License: MIT
+-Project-URL: Documentation, https://urllib3.readthedocs.io/
+-Project-URL: Code, https://github.com/urllib3/urllib3
+-Project-URL: Issue tracker, https://github.com/urllib3/urllib3/issues
+-Keywords: urllib httplib threadsafe filepost http https ssl pooling
+-Platform: UNKNOWN
+-Classifier: Environment :: Web Environment
+-Classifier: Intended Audience :: Developers
+-Classifier: License :: OSI Approved :: MIT License
+-Classifier: Operating System :: OS Independent
+-Classifier: Programming Language :: Python
+-Classifier: Programming Language :: Python :: 2
+-Classifier: Programming Language :: Python :: 2.7
+-Classifier: Programming Language :: Python :: 3
+-Classifier: Programming Language :: Python :: 3.5
+-Classifier: Programming Language :: Python :: 3.6
+-Classifier: Programming Language :: Python :: 3.7
+-Classifier: Programming Language :: Python :: 3.8
+-Classifier: Programming Language :: Python :: 3.9
+-Classifier: Programming Language :: Python :: Implementation :: CPython
+-Classifier: Programming Language :: Python :: Implementation :: PyPy
+-Classifier: Topic :: Internet :: WWW/HTTP
+-Classifier: Topic :: Software Development :: Libraries
+-Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4
+-Provides-Extra: brotli
+-Requires-Dist: brotlipy (>=0.6.0) ; extra == 'brotli'
+-Provides-Extra: secure
+-Requires-Dist: certifi ; extra == 'secure'
+-Requires-Dist: cryptography (>=1.3.4) ; extra == 'secure'
+-Requires-Dist: idna (>=2.0.0) ; extra == 'secure'
+-Requires-Dist: pyOpenSSL (>=0.14) ; extra == 'secure'
+-Requires-Dist: ipaddress ; (python_version == "2.7") and extra == 'secure'
+-Provides-Extra: socks
+-Requires-Dist: PySocks (!=1.5.7,<2.0,>=1.5.6) ; extra == 'socks'
+-
+-urllib3
+-=======
+-
+-urllib3 is a powerful, *sanity-friendly* HTTP client for Python. Much of the
+-Python ecosystem already uses urllib3 and you should too.
+-urllib3 brings many critical features that are missing from the Python
+-standard libraries:
+-
+-- Thread safety.
+-- Connection pooling.
+-- Client-side SSL/TLS verification.
+-- File uploads with multipart encoding.
+-- Helpers for retrying requests and dealing with HTTP redirects.
+-- Support for gzip, deflate, and brotli encoding.
+-- Proxy support for HTTP and SOCKS.
+-- 100% test coverage.
+-
+-urllib3 is powerful and easy to use::
+-
+-    >>> import urllib3
+-    >>> http = urllib3.PoolManager()
+-    >>> r = http.request('GET', 'http://httpbin.org/robots.txt')
+-    >>> r.status
+-    200
+-    >>> r.data
+-    'User-agent: *\nDisallow: /deny\n'
+-
+-
+-Installing
+-----------
+-
+-urllib3 can be installed with `pip <https://pip.pypa.io>`_::
+-
+-    $ pip install urllib3
+-
+-Alternatively, you can grab the latest source code from `GitHub <https://github.com/urllib3/urllib3>`_::
+-
+-    $ git clone git://github.com/urllib3/urllib3.git
+-    $ python setup.py install
+-
+-
+-Documentation
+--------------
+-
+-urllib3 has usage and reference documentation at `urllib3.readthedocs.io <https://urllib3.readthedocs.io>`_.
+-
+-
+-Contributing
+-------------
+-
+-urllib3 happily accepts contributions. Please see our
+-`contributing documentation <https://urllib3.readthedocs.io/en/latest/contributing.html>`_
+-for some tips on getting started.
+-
+-
+-Security Disclosures
+---------------------
+-
+-To report a security vulnerability, please use the
+-`Tidelift security contact <https://tidelift.com/security>`_.
+-Tidelift will coordinate the fix and disclosure with maintainers.
+-
+-Maintainers
+------------
+-
+-- `@sethmlarson <https://github.com/sethmlarson>`_ (Seth M. Larson)
+-- `@pquentin <https://github.com/pquentin>`_ (Quentin Pradet)
+-- `@theacodes <https://github.com/theacodes>`_ (Thea Flowers)
+-- `@haikuginger <https://github.com/haikuginger>`_ (Jess Shapiro)
+-- `@lukasa <https://github.com/lukasa>`_ (Cory Benfield)
+-- `@sigmavirus24 <https://github.com/sigmavirus24>`_ (Ian Stapleton Cordasco)
+-- `@shazow <https://github.com/shazow>`_ (Andrey Petrov)
+-
+-👋
+-
+-
+-Sponsorship
+------------
+-
+-.. |tideliftlogo| image:: https://nedbatchelder.com/pix/Tidelift_Logos_RGB_Tidelift_Shorthand_On-White_small.png
+-   :width: 75
+-   :alt: Tidelift
+-
+-.. list-table::
+-   :widths: 10 100
+-
+-   * - |tideliftlogo|
+-     - Professional support for urllib3 is available as part of the `Tidelift
+-       Subscription`_.  Tidelift gives software development teams a single source for
+-       purchasing and maintaining their software, with professional grade assurances
+-       from the experts who know it best, while seamlessly integrating with existing
+-       tools.
+-
+-.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-urllib3?utm_source=pypi-urllib3&utm_medium=referral&utm_campaign=readme
+-
+-If your company benefits from this library, please consider `sponsoring its
+-development <https://urllib3.readthedocs.io/en/latest/contributing.html#sponsorship-project-grants>`_.
+-
+-Sponsors include:
+-
+-- Abbott (2018-2019), sponsored `@sethmlarson <https://github.com/sethmlarson>`_'s work on urllib3.
+-- Google Cloud Platform (2018-2019), sponsored `@theacodes <https://github.com/theacodes>`_'s work on urllib3.
+-- Akamai (2017-2018), sponsored `@haikuginger <https://github.com/haikuginger>`_'s work on urllib3
+-- Hewlett Packard Enterprise (2016-2017), sponsored `@Lukasa’s <https://github.com/Lukasa>`_ work on urllib3.
+-
+-
+-Changes
+-=======
+-
+-1.25.9 (2020-04-16)
+--------------------
+-
+-* Added ``InvalidProxyConfigurationWarning`` which is raised when
+-  erroneously specifying an HTTPS proxy URL. urllib3 doesn't currently
+-  support connecting to HTTPS proxies but will soon be able to
+-  and we would like users to migrate properly without much breakage.
+-
+-  See `this GitHub issue <https://github.com/urllib3/urllib3/issues/1850>`_
+-  for more information on how to fix your proxy config. (Pull #1851)
+-
+-* Drain connection after ``PoolManager`` redirect (Pull #1817)
+-
+-* Ensure ``load_verify_locations`` raises ``SSLError`` for all backends (Pull #1812)
+-
+-* Rename ``VerifiedHTTPSConnection`` to ``HTTPSConnection`` (Pull #1805)
+-
+-* Allow the CA certificate data to be passed as a string (Pull #1804)
+-
+-* Raise ``ValueError`` if method contains control characters (Pull #1800)
+-
+-* Add ``__repr__`` to ``Timeout`` (Pull #1795)
+-
+-
+-1.25.8 (2020-01-20)
+--------------------
+-
+-* Drop support for EOL Python 3.4 (Pull #1774)
+-
+-* Optimize _encode_invalid_chars (Pull #1787)
+-
+-
+-1.25.7 (2019-11-11)
+--------------------
+-
+-* Preserve ``chunked`` parameter on retries (Pull #1715, Pull #1734)
+-
+-* Allow unset ``SERVER_SOFTWARE`` in App Engine (Pull #1704, Issue #1470)
+-
+-* Fix issue where URL fragment was sent within the request target. (Pull #1732)
+-
+-* Fix issue where an empty query section in a URL would fail to parse. (Pull #1732)
+-
+-* Remove TLS 1.3 support in SecureTransport due to Apple removing support (Pull #1703)
+-
+-
+-1.25.6 (2019-09-24)
+--------------------
+-
+-* Fix issue where tilde (``~``) characters were incorrectly
+-  percent-encoded in the path. (Pull #1692)
+-
+-
+-1.25.5 (2019-09-19)
+--------------------
+-
+-* Add mitigation for BPO-37428 affecting Python <3.7.4 and OpenSSL 1.1.1+ which
+-  caused certificate verification to be enabled when using ``cert_reqs=CERT_NONE``.
+-  (Issue #1682)
+-
+-
+-1.25.4 (2019-09-19)
+--------------------
+-
+-* Propagate Retry-After header settings to subsequent retries. (Pull #1607)
+-
+-* Fix edge case where Retry-After header was still respected even when
+-  explicitly opted out of. (Pull #1607)
+-
+-* Remove dependency on ``rfc3986`` for URL parsing.
+-
+-* Fix issue where URLs containing invalid characters within ``Url.auth`` would
+-  raise an exception instead of percent-encoding those characters.
+-
+-* Add support for ``HTTPResponse.auto_close = False`` which makes HTTP responses
+-  work well with BufferedReaders and other ``io`` module features. (Pull #1652)
+-
+-* Percent-encode invalid characters in URL for ``HTTPConnectionPool.request()`` (Pull #1673)
+-
+-
+-1.25.3 (2019-05-23)
+--------------------
+-
+-* Change ``HTTPSConnection`` to load system CA certificates
+-  when ``ca_certs``, ``ca_cert_dir``, and ``ssl_context`` are
+-  unspecified. (Pull #1608, Issue #1603)
+-
+-* Upgrade bundled rfc3986 to v1.3.2. (Pull #1609, Issue #1605)
+-
+-
+-1.25.2 (2019-04-28)
+--------------------
+-
+-* Change ``is_ipaddress`` to not detect IPvFuture addresses. (Pull #1583)
+-
+-* Change ``parse_url`` to percent-encode invalid characters within the
+-  path, query, and target components. (Pull #1586)
+-
+-
+-1.25.1 (2019-04-24)
+--------------------
+-
+-* Add support for Google's ``Brotli`` package. (Pull #1572, Pull #1579)
+-
+-* Upgrade bundled rfc3986 to v1.3.1 (Pull #1578)
+-
+-
+-1.25 (2019-04-22)
+------------------
+-
+-* Require and validate certificates by default when using HTTPS (Pull #1507)
+-
+-* Upgraded ``urllib3.utils.parse_url()`` to be RFC 3986 compliant. (Pull #1487)
+-
+-* Added support for ``key_password`` for ``HTTPSConnectionPool`` to use
+-  encrypted ``key_file`` without creating your own ``SSLContext`` object. (Pull #1489)
+-
+-* Add TLSv1.3 support to CPython, pyOpenSSL, and SecureTransport ``SSLContext``
+-  implementations. (Pull #1496)
+-
+-* Switched the default multipart header encoder from RFC 2231 to HTML 5 working draft. (Issue #303, PR #1492)
+-
+-* Fixed issue where OpenSSL would block if an encrypted client private key was
+-  given and no password was given. Instead an ``SSLError`` is raised. (Pull #1489)
+-
+-* Added support for Brotli content encoding. It is enabled automatically if
+-  ``brotlipy`` package is installed which can be requested with
+-  ``urllib3[brotli]`` extra. (Pull #1532)
+-
+-* Drop ciphers using DSS key exchange from default TLS cipher suites.
+-  Improve default ciphers when using SecureTransport. (Pull #1496)
+-
+-* Implemented a more efficient ``HTTPResponse.__iter__()`` method. (Issue #1483)
+-
+-1.24.3 (2019-05-01)
+--------------------
+-
+-* Apply fix for CVE-2019-9740. (Pull #1591)
+-
+-1.24.2 (2019-04-17)
+--------------------
+-
+-* Don't load system certificates by default when any other ``ca_certs``, ``ca_certs_dir`` or
+-  ``ssl_context`` parameters are specified.
+-
+-* Remove Authorization header regardless of case when redirecting to cross-site. (Issue #1510)
+-
+-* Add support for IPv6 addresses in subjectAltName section of certificates. (Issue #1269)
+-
+-
+-1.24.1 (2018-11-02)
+--------------------
+-
+-* Remove quadratic behavior within ``GzipDecoder.decompress()`` (Issue #1467)
+-
+-* Restored functionality of ``ciphers`` parameter for ``create_urllib3_context()``. (Issue #1462)
+-
+-
+-1.24 (2018-10-16)
+------------------
+-
+-* Allow key_server_hostname to be specified when initializing a PoolManager to allow custom SNI to be overridden. (Pull #1449)
+-
+-* Test against Python 3.7 on AppVeyor. (Pull #1453)
+-
+-* Early-out ipv6 checks when running on App Engine. (Pull #1450)
+-
+-* Change ambiguous description of backoff_factor (Pull #1436)
+-
+-* Add ability to handle multiple Content-Encodings (Issue #1441 and Pull #1442)
+-
+-* Skip DNS names that can't be idna-decoded when using pyOpenSSL (Issue #1405).
+-
+-* Add a server_hostname parameter to HTTPSConnection which allows for
+-  overriding the SNI hostname sent in the handshake. (Pull #1397)
+-
+-* Drop support for EOL Python 2.6 (Pull #1429 and Pull #1430)
+-
+-* Fixed bug where responses with header Content-Type: message/* erroneously
+-  raised HeaderParsingError, resulting in a warning being logged. (Pull #1439)
+-
+-* Move urllib3 to src/urllib3 (Pull #1409)
+-
+-
+-1.23 (2018-06-04)
+------------------
+-
+-* Allow providing a list of headers to strip from requests when redirecting
+-  to a different host. Defaults to the ``Authorization`` header. Different
+-  headers can be set via ``Retry.remove_headers_on_redirect``. (Issue #1316)
+-
+-* Fix ``util.selectors._fileobj_to_fd`` to accept ``long`` (Issue #1247).
+-
+-* Dropped Python 3.3 support. (Pull #1242)
+-
+-* Put the connection back in the pool when calling stream() or read_chunked() on
+-  a chunked HEAD response. (Issue #1234)
+-
+-* Fixed pyOpenSSL-specific ssl client authentication issue when clients
+-  attempted to auth via certificate + chain (Issue #1060)
+-
+-* Add the port to the connectionpool connect print (Pull #1251)
+-
+-* Don't use the ``uuid`` module to create multipart data boundaries. (Pull #1380)
+-
+-* ``read_chunked()`` on a closed response returns no chunks. (Issue #1088)
+-
+-* Add Python 2.6 support to ``contrib.securetransport`` (Pull #1359)
+-
+-* Added support for auth info in url for SOCKS proxy (Pull #1363)
+-
+-
+-1.22 (2017-07-20)
+------------------
+-
+-* Fixed missing brackets in ``HTTP CONNECT`` when connecting to IPv6 address via
+-  IPv6 proxy. (Issue #1222)
+-
+-* Made the connection pool retry on ``SSLError``.  The original ``SSLError``
+-  is available on ``MaxRetryError.reason``. (Issue #1112)
+-
+-* Drain and release connection before recursing on retry/redirect.  Fixes
+-  deadlocks with a blocking connectionpool. (Issue #1167)
+-
+-* Fixed compatibility for cookiejar. (Issue #1229)
+-
+-* pyopenssl: Use vendored version of ``six``. (Issue #1231)
+-
+-
+-1.21.1 (2017-05-02)
+--------------------
+-
+-* Fixed SecureTransport issue that would cause long delays in response body
+-  delivery. (Pull #1154)
+-
+-* Fixed regression in 1.21 that threw exceptions when users passed the
+-  ``socket_options`` flag to the ``PoolManager``.  (Issue #1165)
+-
+-* Fixed regression in 1.21 that threw exceptions when users passed the
+-  ``assert_hostname`` or ``assert_fingerprint`` flag to the ``PoolManager``.
+-  (Pull #1157)
+-
+-
+-1.21 (2017-04-25)
+------------------
+-
+-* Improved performance of certain selector system calls on Python 3.5 and
+-  later. (Pull #1095)
+-
+-* Resolved issue where the PyOpenSSL backend would not wrap SysCallError
+-  exceptions appropriately when sending data. (Pull #1125)
+-
+-* Selectors now detects a monkey-patched select module after import for modules
+-  that patch the select module like eventlet, greenlet. (Pull #1128)
+-
+-* Reduced memory consumption when streaming zlib-compressed responses
+-  (as opposed to raw deflate streams). (Pull #1129)
+-
+-* Connection pools now use the entire request context when constructing the
+-  pool key. (Pull #1016)
+-
+-* ``PoolManager.connection_from_*`` methods now accept a new keyword argument,
+-  ``pool_kwargs``, which are merged with the existing ``connection_pool_kw``.
+-  (Pull #1016)
+-
+-* Add retry counter for ``status_forcelist``. (Issue #1147)
+-
+-* Added ``contrib`` module for using SecureTransport on macOS:
+-  ``urllib3.contrib.securetransport``.  (Pull #1122)
+-
+-* urllib3 now only normalizes the case of ``http://`` and ``https://`` schemes:
+-  for schemes it does not recognise, it assumes they are case-sensitive and
+-  leaves them unchanged.
+-  (Issue #1080)
+-
+-
+-1.20 (2017-01-19)
+------------------
+-
+-* Added support for waiting for I/O using selectors other than select,
+-  improving urllib3's behaviour with large numbers of concurrent connections.
+-  (Pull #1001)
+-
+-* Updated the date for the system clock check. (Issue #1005)
+-
+-* ConnectionPools now correctly consider hostnames to be case-insensitive.
+-  (Issue #1032)
+-
+-* Outdated versions of PyOpenSSL now cause the PyOpenSSL contrib module
+-  to fail when it is injected, rather than at first use. (Pull #1063)
+-
+-* Outdated versions of cryptography now cause the PyOpenSSL contrib module
+-  to fail when it is injected, rather than at first use. (Issue #1044)
+-
+-* Automatically attempt to rewind a file-like body object when a request is
+-  retried or redirected. (Pull #1039)
+-
+-* Fix some bugs that occur when modules incautiously patch the queue module.
+-  (Pull #1061)
+-
+-* Prevent retries from occurring on read timeouts for which the request method
+-  was not in the method whitelist. (Issue #1059)
+-
+-* Changed the PyOpenSSL contrib module to lazily load idna to avoid
+-  unnecessarily bloating the memory of programs that don't need it. (Pull
+-  #1076)
+-
+-* Add support for IPv6 literals with zone identifiers. (Pull #1013)
+-
+-* Added support for socks5h:// and socks4a:// schemes when working with SOCKS
+-  proxies, and controlled remote DNS appropriately. (Issue #1035)
+-
+-
+-1.19.1 (2016-11-16)
+--------------------
+-
+-* Fixed AppEngine import that didn't function on Python 3.5. (Pull #1025)
+-
+-
+-1.19 (2016-11-03)
+------------------
+-
+-* urllib3 now respects Retry-After headers on 413, 429, and 503 responses when
+-  using the default retry logic. (Pull #955)
+-
+-* Remove markers from setup.py to assist ancient setuptools versions. (Issue
+-  #986)
+-
+-* Disallow superscripts and other integerish things in URL ports. (Issue #989)
+-
+-* Allow urllib3's HTTPResponse.stream() method to continue to work with
+-  non-httplib underlying FPs. (Pull #990)
+-
+-* Empty filenames in multipart headers are now emitted as such, rather than
+-  being suppressed. (Issue #1015)
+-
+-* Prefer user-supplied Host headers on chunked uploads. (Issue #1009)
+-
+-
+-1.18.1 (2016-10-27)
+--------------------
+-
+-* CVE-2016-9015. Users who are using urllib3 version 1.17 or 1.18 along with
+-  PyOpenSSL injection and OpenSSL 1.1.0 *must* upgrade to this version. This
+-  release fixes a vulnerability whereby urllib3 in the above configuration
+-  would silently fail to validate TLS certificates due to erroneously setting
+-  invalid flags in OpenSSL's ``SSL_CTX_set_verify`` function. These erroneous
+-  flags do not cause a problem in OpenSSL versions before 1.1.0, which
+-  interprets the presence of any flag as requesting certificate validation.
+-
+-  There is no PR for this patch, as it was prepared for simultaneous disclosure
+-  and release. The master branch received the same fix in PR #1010.
+-
+-
+-1.18 (2016-09-26)
+------------------
+-
+-* Fixed incorrect message for IncompleteRead exception. (PR #973)
+-
+-* Accept ``iPAddress`` subject alternative name fields in TLS certificates.
+-  (Issue #258)
+-
+-* Fixed consistency of ``HTTPResponse.closed`` between Python 2 and 3.
+-  (Issue #977)
+-
+-* Fixed handling of wildcard certificates when using PyOpenSSL. (Issue #979)
+-
+-
+-1.17 (2016-09-06)
+------------------
+-
+-* Accept ``SSLContext`` objects for use in SSL/TLS negotiation. (Issue #835)
+-
+-* ConnectionPool debug log now includes scheme, host, and port. (Issue #897)
+-
+-* Substantially refactored documentation. (Issue #887)
+-
+-* Used URLFetch default timeout on AppEngine, rather than hardcoding our own.
+-  (Issue #858)
+-
+-* Normalize the scheme and host in the URL parser (Issue #833)
+-
+-* ``HTTPResponse`` contains the last ``Retry`` object, which now also
+-  contains retries history. (Issue #848)
+-
+-* Timeout can no longer be set as boolean, and must be greater than zero.
+-  (PR #924)
+-
+-* Removed pyasn1 and ndg-httpsclient from dependencies used for PyOpenSSL. We
+-  now use cryptography and idna, both of which are already dependencies of
+-  PyOpenSSL. (PR #930)
+-
+-* Fixed infinite loop in ``stream`` when amt=None. (Issue #928)
+-
+-* Try to use the operating system's certificates when we are using an
+-  ``SSLContext``. (PR #941)
+-
+-* Updated cipher suite list to allow ChaCha20+Poly1305. AES-GCM is preferred to
+-  ChaCha20, but ChaCha20 is then preferred to everything else. (PR #947)
+-
+-* Updated cipher suite list to remove 3DES-based cipher suites. (PR #958)
+-
+-* Removed the cipher suite fallback to allow HIGH ciphers. (PR #958)
+-
+-* Implemented ``length_remaining`` to determine remaining content
+-  to be read. (PR #949)
+-
+-* Implemented ``enforce_content_length`` to enable exceptions when
+-  incomplete data chunks are received. (PR #949)
+-
+-* Dropped connection start, dropped connection reset, redirect, forced retry,
+-  and new HTTPS connection log levels to DEBUG, from INFO. (PR #967)
+-
+-
+-1.16 (2016-06-11)
+------------------
+-
+-* Disable IPv6 DNS when IPv6 connections are not possible. (Issue #840)
+-
+-* Provide ``key_fn_by_scheme`` pool keying mechanism that can be
+-  overridden. (Issue #830)
+-
+-* Normalize scheme and host to lowercase for pool keys, and include
+-  ``source_address``. (Issue #830)
+-
+-* Cleaner exception chain in Python 3 for ``_make_request``.
+-  (Issue #861)
+-
+-* Fixed installing ``urllib3[socks]`` extra. (Issue #864)
+-
+-* Fixed signature of ``ConnectionPool.close`` so it can actually safely be
+-  called by subclasses. (Issue #873)
+-
+-* Retain ``release_conn`` state across retries. (Issues #651, #866)
+-
+-* Add customizable ``HTTPConnectionPool.ResponseCls``, which defaults to
+-  ``HTTPResponse`` but can be replaced with a subclass. (Issue #879)
+-
+-
+-1.15.1 (2016-04-11)
+--------------------
+-
+-* Fix packaging to include backports module. (Issue #841)
+-
+-
+-1.15 (2016-04-06)
+------------------
+-
+-* Added Retry(raise_on_status=False). (Issue #720)
+-
+-* Always use setuptools, no more distutils fallback. (Issue #785)
+-
+-* Dropped support for Python 3.2. (Issue #786)
+-
+-* Chunked transfer encoding when requesting with ``chunked=True``.
+-  (Issue #790)
+-
+-* Fixed regression with IPv6 port parsing. (Issue #801)
+-
+-* Append SNIMissingWarning messages to allow users to specify it in
+-  the PYTHONWARNINGS environment variable. (Issue #816)
+-
+-* Handle unicode headers in Py2. (Issue #818)
+-
+-* Log certificate when there is a hostname mismatch. (Issue #820)
+-
+-* Preserve order of request/response headers. (Issue #821)
+-
+-
+-1.14 (2015-12-29)
+------------------
+-
+-* contrib: SOCKS proxy support! (Issue #762)
+-
+-* Fixed AppEngine handling of transfer-encoding header and bug
+-  in Timeout defaults checking. (Issue #763)
+-
+-
+-1.13.1 (2015-12-18)
+--------------------
+-
+-* Fixed regression in IPv6 + SSL for match_hostname. (Issue #761)
+-
+-
+-1.13 (2015-12-14)
+------------------
+-
+-* Fixed ``pip install urllib3[secure]`` on modern pip. (Issue #706)
+-
+-* pyopenssl: Fixed SSL3_WRITE_PENDING error. (Issue #717)
+-
+-* pyopenssl: Support for TLSv1.1 and TLSv1.2. (Issue #696)
+-
+-* Close connections more defensively on exception. (Issue #734)
+-
+-* Adjusted ``read_chunked`` to handle gzipped, chunk-encoded bodies without
+-  repeatedly flushing the decoder, to function better on Jython. (Issue #743)
+-
+-* Accept ``ca_cert_dir`` for SSL-related PoolManager configuration. (Issue #758)
+-
+-
+-1.12 (2015-09-03)
+------------------
+-
+-* Rely on ``six`` for importing ``httplib`` to work around
+-  conflicts with other Python 3 shims. (Issue #688)
+-
+-* Add support for directories of certificate authorities, as supported by
+-  OpenSSL. (Issue #701)
+-
+-* New exception: ``NewConnectionError``, raised when we fail to establish
+-  a new connection, usually ``ECONNREFUSED`` socket error.
+-
+-
+-1.11 (2015-07-21)
+------------------
+-
+-* When ``ca_certs`` is given, ``cert_reqs`` defaults to
+-  ``'CERT_REQUIRED'``. (Issue #650)
+-
+-* ``pip install urllib3[secure]`` will install Certifi and
+-  PyOpenSSL as dependencies. (Issue #678)
+-
+-* Made ``HTTPHeaderDict`` usable as a ``headers`` input value
+-  (Issues #632, #679)
+-
+-* Added `urllib3.contrib.appengine <https://urllib3.readthedocs.io/en/latest/contrib.html#google-app-engine>`_
+-  which has an ``AppEngineManager`` for using ``URLFetch`` in a
+-  Google AppEngine environment. (Issue #664)
+-
+-* Dev: Added test suite for AppEngine. (Issue #631)
+-
+-* Fix performance regression when using PyOpenSSL. (Issue #626)
+-
+-* Passing incorrect scheme (e.g. ``foo://``) will raise
+-  ``ValueError`` instead of ``AssertionError`` (backwards
+-  compatible for now, but please migrate). (Issue #640)
+-
+-* Fix pools not getting replenished when an error occurs during a
+-  request using ``release_conn=False``. (Issue #644)
+-
+-* Fix pool-default headers not applying for url-encoded requests
+-  like GET. (Issue #657)
+-
+-* log.warning in Python 3 when headers are skipped due to parsing
+-  errors. (Issue #642)
+-
+-* Close and discard connections if an error occurs during read.
+-  (Issue #660)
+-
+-* Fix host parsing for IPv6 proxies. (Issue #668)
+-
+-* Separate warning type SubjectAltNameWarning, now issued once
+-  per host. (Issue #671)
+-
+-* Fix ``httplib.IncompleteRead`` not getting converted to
+-  ``ProtocolError`` when using ``HTTPResponse.stream()``
+-  (Issue #674)
+-
+-1.10.4 (2015-05-03)
+--------------------
+-
+-* Migrate tests to Tornado 4. (Issue #594)
+-
+-* Append default warning configuration rather than overwrite.
+-  (Issue #603)
+-
+-* Fix streaming decoding regression. (Issue #595)
+-
+-* Fix chunked requests losing state across keep-alive connections.
+-  (Issue #599)
+-
+-* Fix hanging when chunked HEAD response has no body. (Issue #605)
+-
+-
+-1.10.3 (2015-04-21)
+--------------------
+-
+-* Emit ``InsecurePlatformWarning`` when SSLContext object is missing.
+-  (Issue #558)
+-
+-* Fix regression of duplicate header keys being discarded.
+-  (Issue #563)
+-
+-* ``Response.stream()`` returns a generator for chunked responses.
+-  (Issue #560)
+-
+-* Set upper-bound timeout when waiting for a socket in PyOpenSSL.
+-  (Issue #585)
+-
+-* Work on platforms without `ssl` module for plain HTTP requests.
+-  (Issue #587)
+-
+-* Stop relying on the stdlib's default cipher list. (Issue #588)
+-
+-
+-1.10.2 (2015-02-25)
+--------------------
+-
+-* Fix file descriptor leakage on retries. (Issue #548)
+-
+-* Removed RC4 from default cipher list. (Issue #551)
+-
+-* Header performance improvements. (Issue #544)
+-
+-* Fix PoolManager not obeying redirect retry settings. (Issue #553)
+-
+-
+-1.10.1 (2015-02-10)
+--------------------
+-
+-* Pools can be used as context managers. (Issue #545)
+-
+-* Don't re-use connections which experienced an SSLError. (Issue #529)
+-
+-* Don't fail when gzip decoding an empty stream. (Issue #535)
+-
+-* Add sha256 support for fingerprint verification. (Issue #540)
+-
+-* Fixed handling of header values containing commas. (Issue #533)
+-
+-
+-1.10 (2014-12-14)
+------------------
+-
+-* Disabled SSLv3. (Issue #473)
+-
+-* Add ``Url.url`` property to return the composed url string. (Issue #394)
+-
+-* Fixed PyOpenSSL + gevent ``WantWriteError``. (Issue #412)
+-
+-* ``MaxRetryError.reason`` will always be an exception, not string.
+-  (Issue #481)
+-
+-* Fixed SSL-related timeouts not being detected as timeouts. (Issue #492)
+-
+-* Py3: Use ``ssl.create_default_context()`` when available. (Issue #473)
+-
+-* Emit ``InsecureRequestWarning`` for *every* insecure HTTPS request.
+-  (Issue #496)
+-
+-* Emit ``SecurityWarning`` when certificate has no ``subjectAltName``.
+-  (Issue #499)
+-
+-* Close and discard sockets which experienced SSL-related errors.
+-  (Issue #501)
+-
+-* Handle ``body`` param in ``.request(...)``. (Issue #513)
+-
+-* Respect timeout with HTTPS proxy. (Issue #505)
+-
+-* PyOpenSSL: Handle ZeroReturnError exception. (Issue #520)
+-
+-
+-1.9.1 (2014-09-13)
+-------------------
+-
+-* Apply socket arguments before binding. (Issue #427)
+-
+-* More careful checks if fp-like object is closed. (Issue #435)
+-
+-* Fixed packaging issues of some development-related files not
+-  getting included. (Issue #440)
+-
+-* Allow performing *only* fingerprint verification. (Issue #444)
+-
+-* Emit ``SecurityWarning`` if system clock is waaay off. (Issue #445)
+-
+-* Fixed PyOpenSSL compatibility with PyPy. (Issue #450)
+-
+-* Fixed ``BrokenPipeError`` and ``ConnectionError`` handling in Py3.
+-  (Issue #443)
+-
+-
+-
+-1.9 (2014-07-04)
+-----------------
+-
+-* Shuffled around development-related files. If you're maintaining a distro
+-  package of urllib3, you may need to tweak things. (Issue #415)
+-
+-* Unverified HTTPS requests will trigger a warning on the first request. See
+-  our new `security documentation
+-  <https://urllib3.readthedocs.io/en/latest/security.html>`_ for details.
+-  (Issue #426)
+-
+-* New retry logic and ``urllib3.util.retry.Retry`` configuration object.
+-  (Issue #326)
+-
+-* All raised exceptions should now wrapped in a
+-  ``urllib3.exceptions.HTTPException``-extending exception. (Issue #326)
+-
+-* All errors during a retry-enabled request should be wrapped in
+-  ``urllib3.exceptions.MaxRetryError``, including timeout-related exceptions
+-  which were previously exempt. Underlying error is accessible from the
+-  ``.reason`` property. (Issue #326)
+-
+-* ``urllib3.exceptions.ConnectionError`` renamed to
+-  ``urllib3.exceptions.ProtocolError``. (Issue #326)
+-
+-* Errors during response read (such as IncompleteRead) are now wrapped in
+-  ``urllib3.exceptions.ProtocolError``. (Issue #418)
+-
+-* Requesting an empty host will raise ``urllib3.exceptions.LocationValueError``.
+-  (Issue #417)
+-
+-* Catch read timeouts over SSL connections as
+-  ``urllib3.exceptions.ReadTimeoutError``. (Issue #419)
+-
+-* Apply socket arguments before connecting. (Issue #427)
+-
+-
+-1.8.3 (2014-06-23)
+-------------------
+-
+-* Fix TLS verification when using a proxy in Python 3.4.1. (Issue #385)
+-
+-* Add ``disable_cache`` option to ``urllib3.util.make_headers``. (Issue #393)
+-
+-* Wrap ``socket.timeout`` exception with
+-  ``urllib3.exceptions.ReadTimeoutError``. (Issue #399)
+-
+-* Fixed proxy-related bug where connections were being reused incorrectly.
+-  (Issues #366, #369)
+-
+-* Added ``socket_options`` keyword parameter which allows to define
+-  ``setsockopt`` configuration of new sockets. (Issue #397)
+-
+-* Removed ``HTTPConnection.tcp_nodelay`` in favor of
+-  ``HTTPConnection.default_socket_options``. (Issue #397)
+-
+-* Fixed ``TypeError`` bug in Python 2.6.4. (Issue #411)
+-
+-
+-1.8.2 (2014-04-17)
+-------------------
+-
+-* Fix ``urllib3.util`` not being included in the package.
+-
+-
+-1.8.1 (2014-04-17)
+-------------------
+-
+-* Fix AppEngine bug of HTTPS requests going out as HTTP. (Issue #356)
+-
+-* Don't install ``dummyserver`` into ``site-packages`` as it's only needed
+-  for the test suite. (Issue #362)
+-
+-* Added support for specifying ``source_address``. (Issue #352)
+-
+-
+-1.8 (2014-03-04)
+-----------------
+-
+-* Improved url parsing in ``urllib3.util.parse_url`` (properly parse '@' in
+-  username, and blank ports like 'hostname:').
+-
+-* New ``urllib3.connection`` module which contains all the HTTPConnection
+-  objects.
+-
+-* Several ``urllib3.util.Timeout``-related fixes. Also changed constructor
+-  signature to a more sensible order. [Backwards incompatible]
+-  (Issues #252, #262, #263)
+-
+-* Use ``backports.ssl_match_hostname`` if it's installed. (Issue #274)
+-
+-* Added ``.tell()`` method to ``urllib3.response.HTTPResponse`` which
+-  returns the number of bytes read so far. (Issue #277)
+-
+-* Support for platforms without threading. (Issue #289)
+-
+-* Expand default-port comparison in ``HTTPConnectionPool.is_same_host``
+-  to allow a pool with no specified port to be considered equal to to an
+-  HTTP/HTTPS url with port 80/443 explicitly provided. (Issue #305)
+-
+-* Improved default SSL/TLS settings to avoid vulnerabilities.
+-  (Issue #309)
+-
+-* Fixed ``urllib3.poolmanager.ProxyManager`` not retrying on connect errors.
+-  (Issue #310)
+-
+-* Disable Nagle's Algorithm on the socket for non-proxies. A subset of requests
+-  will send the entire HTTP request ~200 milliseconds faster; however, some of
+-  the resulting TCP packets will be smaller. (Issue #254)
+-
+-* Increased maximum number of SubjectAltNames in ``urllib3.contrib.pyopenssl``
+-  from the default 64 to 1024 in a single certificate. (Issue #318)
+-
+-* Headers are now passed and stored as a custom
+-  ``urllib3.collections_.HTTPHeaderDict`` object rather than a plain ``dict``.
+-  (Issue #329, #333)
+-
+-* Headers no longer lose their case on Python 3. (Issue #236)
+-
+-* ``urllib3.contrib.pyopenssl`` now uses the operating system's default CA
+-  certificates on inject. (Issue #332)
+-
+-* Requests with ``retries=False`` will immediately raise any exceptions without
+-  wrapping them in ``MaxRetryError``. (Issue #348)
+-
+-* Fixed open socket leak with SSL-related failures. (Issue #344, #348)
+-
+-
+-1.7.1 (2013-09-25)
+-------------------
+-
+-* Added granular timeout support with new ``urllib3.util.Timeout`` class.
+-  (Issue #231)
+-
+-* Fixed Python 3.4 support. (Issue #238)
+-
+-
+-1.7 (2013-08-14)
+-----------------
+-
+-* More exceptions are now pickle-able, with tests. (Issue #174)
+-
+-* Fixed redirecting with relative URLs in Location header. (Issue #178)
+-
+-* Support for relative urls in ``Location: ...`` header. (Issue #179)
+-
+-* ``urllib3.response.HTTPResponse`` now inherits from ``io.IOBase`` for bonus
+-  file-like functionality. (Issue #187)
+-
+-* Passing ``assert_hostname=False`` when creating a HTTPSConnectionPool will
+-  skip hostname verification for SSL connections. (Issue #194)
+-
+-* New method ``urllib3.response.HTTPResponse.stream(...)`` which acts as a
+-  generator wrapped around ``.read(...)``. (Issue #198)
+-
+-* IPv6 url parsing enforces brackets around the hostname. (Issue #199)
+-
+-* Fixed thread race condition in
+-  ``urllib3.poolmanager.PoolManager.connection_from_host(...)`` (Issue #204)
+-
+-* ``ProxyManager`` requests now include non-default port in ``Host: ...``
+-  header. (Issue #217)
+-
+-* Added HTTPS proxy support in ``ProxyManager``. (Issue #170 #139)
+-
+-* New ``RequestField`` object can be passed to the ``fields=...`` param which
+-  can specify headers. (Issue #220)
+-
+-* Raise ``urllib3.exceptions.ProxyError`` when connecting to proxy fails.
+-  (Issue #221)
+-
+-* Use international headers when posting file names. (Issue #119)
+-
+-* Improved IPv6 support. (Issue #203)
+-
+-
+-1.6 (2013-04-25)
+-----------------
+-
+-* Contrib: Optional SNI support for Py2 using PyOpenSSL. (Issue #156)
+-
+-* ``ProxyManager`` automatically adds ``Host: ...`` header if not given.
+-
+-* Improved SSL-related code. ``cert_req`` now optionally takes a string like
+-  "REQUIRED" or "NONE". Same with ``ssl_version`` takes strings like "SSLv23"
+-  The string values reflect the suffix of the respective constant variable.
+-  (Issue #130)
+-
+-* Vendored ``socksipy`` now based on Anorov's fork which handles unexpectedly
+-  closed proxy connections and larger read buffers. (Issue #135)
+-
+-* Ensure the connection is closed if no data is received, fixes connection leak
+-  on some platforms. (Issue #133)
+-
+-* Added SNI support for SSL/TLS connections on Py32+. (Issue #89)
+-
+-* Tests fixed to be compatible with Py26 again. (Issue #125)
+-
+-* Added ability to choose SSL version by passing an ``ssl.PROTOCOL_*`` constant
+-  to the ``ssl_version`` parameter of ``HTTPSConnectionPool``. (Issue #109)
+-
+-* Allow an explicit content type to be specified when encoding file fields.
+-  (Issue #126)
+-
+-* Exceptions are now pickleable, with tests. (Issue #101)
+-
+-* Fixed default headers not getting passed in some cases. (Issue #99)
+-
+-* Treat "content-encoding" header value as case-insensitive, per RFC 2616
+-  Section 3.5. (Issue #110)
+-
+-* "Connection Refused" SocketErrors will get retried rather than raised.
+-  (Issue #92)
+-
+-* Updated vendored ``six``, no longer overrides the global ``six`` module
+-  namespace. (Issue #113)
+-
+-* ``urllib3.exceptions.MaxRetryError`` contains a ``reason`` property holding
+-  the exception that prompted the final retry. If ``reason is None`` then it
+-  was due to a redirect. (Issue #92, #114)
+-
+-* Fixed ``PoolManager.urlopen()`` from not redirecting more than once.
+-  (Issue #149)
+-
+-* Don't assume ``Content-Type: text/plain`` for multi-part encoding parameters
+-  that are not files. (Issue #111)
+-
+-* Pass `strict` param down to ``httplib.HTTPConnection``. (Issue #122)
+-
+-* Added mechanism to verify SSL certificates by fingerprint (md5, sha1) or
+-  against an arbitrary hostname (when connecting by IP or for misconfigured
+-  servers). (Issue #140)
+-
+-* Streaming decompression support. (Issue #159)
+-
+-
+-1.5 (2012-08-02)
+-----------------
+-
+-* Added ``urllib3.add_stderr_logger()`` for quickly enabling STDERR debug
+-  logging in urllib3.
+-
+-* Native full URL parsing (including auth, path, query, fragment) available in
+-  ``urllib3.util.parse_url(url)``.
+-
+-* Built-in redirect will switch method to 'GET' if status code is 303.
+-  (Issue #11)
+-
+-* ``urllib3.PoolManager`` strips the scheme and host before sending the request
+-  uri. (Issue #8)
+-
+-* New ``urllib3.exceptions.DecodeError`` exception for when automatic decoding,
+-  based on the Content-Type header, fails.
+-
+-* Fixed bug with pool depletion and leaking connections (Issue #76). Added
+-  explicit connection closing on pool eviction. Added
+-  ``urllib3.PoolManager.clear()``.
+-
+-* 99% -> 100% unit test coverage.
+-
+-
+-1.4 (2012-06-16)
+-----------------
+-
+-* Minor AppEngine-related fixes.
+-
+-* Switched from ``mimetools.choose_boundary`` to ``uuid.uuid4()``.
+-
+-* Improved url parsing. (Issue #73)
+-
+-* IPv6 url support. (Issue #72)
+-
+-
+-1.3 (2012-03-25)
+-----------------
+-
+-* Removed pre-1.0 deprecated API.
+-
+-* Refactored helpers into a ``urllib3.util`` submodule.
+-
+-* Fixed multipart encoding to support list-of-tuples for keys with multiple
+-  values. (Issue #48)
+-
+-* Fixed multiple Set-Cookie headers in response not getting merged properly in
+-  Python 3. (Issue #53)
+-
+-* AppEngine support with Py27. (Issue #61)
+-
+-* Minor ``encode_multipart_formdata`` fixes related to Python 3 strings vs
+-  bytes.
+-
+-
+-1.2.2 (2012-02-06)
+-------------------
+-
+-* Fixed packaging bug of not shipping ``test-requirements.txt``. (Issue #47)
+-
+-
+-1.2.1 (2012-02-05)
+-------------------
+-
+-* Fixed another bug related to when ``ssl`` module is not available. (Issue #41)
+-
+-* Location parsing errors now raise ``urllib3.exceptions.LocationParseError``
+-  which inherits from ``ValueError``.
+-
+-
+-1.2 (2012-01-29)
+-----------------
+-
+-* Added Python 3 support (tested on 3.2.2)
+-
+-* Dropped Python 2.5 support (tested on 2.6.7, 2.7.2)
+-
+-* Use ``select.poll`` instead of ``select.select`` for platforms that support
+-  it.
+-
+-* Use ``Queue.LifoQueue`` instead of ``Queue.Queue`` for more aggressive
+-  connection reusing. Configurable by overriding ``ConnectionPool.QueueCls``.
+-
+-* Fixed ``ImportError`` during install when ``ssl`` module is not available.
+-  (Issue #41)
+-
+-* Fixed ``PoolManager`` redirects between schemes (such as HTTP -> HTTPS) not
+-  completing properly. (Issue #28, uncovered by Issue #10 in v1.1)
+-
+-* Ported ``dummyserver`` to use ``tornado`` instead of ``webob`` +
+-  ``eventlet``. Removed extraneous unsupported dummyserver testing backends.
+-  Added socket-level tests.
+-
+-* More tests. Achievement Unlocked: 99% Coverage.
+-
+-
+-1.1 (2012-01-07)
+-----------------
+-
+-* Refactored ``dummyserver`` to its own root namespace module (used for
+-  testing).
+-
+-* Added hostname verification for ``VerifiedHTTPSConnection`` by vendoring in
+-  Py32's ``ssl_match_hostname``. (Issue #25)
+-
+-* Fixed cross-host HTTP redirects when using ``PoolManager``. (Issue #10)
+-
+-* Fixed ``decode_content`` being ignored when set through ``urlopen``. (Issue
+-  #27)
+-
+-* Fixed timeout-related bugs. (Issues #17, #23)
+-
+-
+-1.0.2 (2011-11-04)
+-------------------
+-
+-* Fixed typo in ``VerifiedHTTPSConnection`` which would only present as a bug if
+-  you're using the object manually. (Thanks pyos)
+-
+-* Made RecentlyUsedContainer (and consequently PoolManager) more thread-safe by
+-  wrapping the access log in a mutex. (Thanks @christer)
+-
+-* Made RecentlyUsedContainer more dict-like (corrected ``__delitem__`` and
+-  ``__getitem__`` behaviour), with tests. Shouldn't affect core urllib3 code.
+-
+-
+-1.0.1 (2011-10-10)
+-------------------
+-
+-* Fixed a bug where the same connection would get returned into the pool twice,
+-  causing extraneous "HttpConnectionPool is full" log warnings.
+-
+-
+-1.0 (2011-10-08)
+-----------------
+-
+-* Added ``PoolManager`` with LRU expiration of connections (tested and
+-  documented).
+-* Added ``ProxyManager`` (needs tests, docs, and confirmation that it works
+-  with HTTPS proxies).
+-* Added optional partial-read support for responses when
+-  ``preload_content=False``. You can now make requests and just read the headers
+-  without loading the content.
+-* Made response decoding optional (default on, same as before).
+-* Added optional explicit boundary string for ``encode_multipart_formdata``.
+-* Convenience request methods are now inherited from ``RequestMethods``. Old
+-  helpers like ``get_url`` and ``post_url`` should be abandoned in favour of
+-  the new ``request(method, url, ...)``.
+-* Refactored code to be even more decoupled, reusable, and extendable.
+-* License header added to ``.py`` files.
+-* Embiggened the documentation: Lots of Sphinx-friendly docstrings in the code
+-  and docs in ``docs/`` and on https://urllib3.readthedocs.io/.
+-* Embettered all the things!
+-* Started writing this file.
+-
+-
+-0.4.1 (2011-07-17)
+-------------------
+-
+-* Minor bug fixes, code cleanup.
+-
+-
+-0.4 (2011-03-01)
+-----------------
+-
+-* Better unicode support.
+-* Added ``VerifiedHTTPSConnection``.
+-* Added ``NTLMConnectionPool`` in contrib.
+-* Minor improvements.
+-
+-
+-0.3.1 (2010-07-13)
+-------------------
+-
+-* Added ``assert_host_name`` optional parameter. Now compatible with proxies.
+-
+-
+-0.3 (2009-12-10)
+-----------------
+-
+-* Added HTTPS support.
+-* Minor bug fixes.
+-* Refactored, broken backwards compatibility with 0.2.
+-* API to be treated as stable from this version forward.
+-
+-
+-0.2 (2008-11-17)
+-----------------
+-
+-* Added unit tests.
+-* Bug fixes.
+-
+-
+-0.1 (2008-11-16)
+-----------------
+-
+-* First release.
+-
+-
+diff --git a/third_party/python/urllib3/urllib3-1.25.9.dist-info/RECORD b/third_party/python/urllib3/urllib3-1.25.9.dist-info/RECORD
+deleted file mode 100644
+--- a/third_party/python/urllib3/urllib3-1.25.9.dist-info/RECORD
++++ /dev/null
+@@ -1,41 +0,0 @@
+-urllib3/__init__.py,sha256=rdFZCO1L7e8861ZTvo8AiSKwxCe9SnWQUQwJ599YV9c,2683
+-urllib3/_collections.py,sha256=GouVsNzwg6jADZTmimMI6oqmwKSswnMo9dh5tGNVWO4,10792
+-urllib3/connection.py,sha256=Fln8a_bkegdNMkFoSOwyI0PJvL1OqzVUO6ifihKOTpc,14461
+-urllib3/connectionpool.py,sha256=egdaX-Db_LVXifDxv3JY0dHIpQqDv0wC0_9Eeh8FkPM,35725
+-urllib3/exceptions.py,sha256=D2Jvab7M7m_n0rnmBmq481paoVT32VvVeB6VeQM0y-w,7172
+-urllib3/fields.py,sha256=kroD76QK-GdHHW7f_AUN4XxDC3OQPI2FFrS9eSL4BCs,8553
+-urllib3/filepost.py,sha256=vj0qbrpT1AFzvvW4SuC8M5kJiw7wftHcSr-7b8UpPpw,2440
+-urllib3/poolmanager.py,sha256=iWEAIGrVNGoOmQyfiFwCqG-IyYy6GIQ-jJ9QCsX9li4,17861
+-urllib3/request.py,sha256=hhoHvEEatyd9Tn5EbGjQ0emn-ENMCyY591yNWTneINA,6018
+-urllib3/response.py,sha256=eo1Sfkn2x44FtjgP3qwwDsG9ak84spQAxEGy7Ovd4Pc,28221
+-urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+-urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957
+-urllib3/contrib/appengine.py,sha256=9RyUW5vKy4VPa2imtwBNWYKILrypr-K6UXEHUYsf0JY,11010
+-urllib3/contrib/ntlmpool.py,sha256=a402AwGN_Ll3N-4ur_AS6UrU-ycUtlnYqoBF76lORg8,4160
+-urllib3/contrib/pyopenssl.py,sha256=qQKqQXvlSvpCa2yEPxpdv18lS71SMESr9XzH9K9x3KI,16565
+-urllib3/contrib/securetransport.py,sha256=vBDFjSnH2gWa-ztMKVaiwW46K1mlDZKqvo_VAonfdcY,32401
+-urllib3/contrib/socks.py,sha256=nzDMgDIFJWVubKHqvIn2-SKCO91hhJInP92WgHChGzA,7036
+-urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+-urllib3/contrib/_securetransport/bindings.py,sha256=mullWYFaghBdRWla6HYU-TBgFRTPLBEfxj3jplbeJmQ,16886
+-urllib3/contrib/_securetransport/low_level.py,sha256=V7GnujxnWZh2N2sMsV5N4d9Imymokkm3zBwgt77_bSE,11956
+-urllib3/packages/__init__.py,sha256=h4BLhD4tLaBx1adaDtKXfupsgqY0wWLXb_f1_yVlV6A,108
+-urllib3/packages/six.py,sha256=adx4z-eM_D0Vvu0IIqVzFACQ_ux9l64y7DkSEfbxCDs,32536
+-urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+-urllib3/packages/backports/makefile.py,sha256=005wrvH-_pWSnTFqQ2sdzzh4zVCtQUUQ4mR2Yyxwc0A,1418
+-urllib3/packages/ssl_match_hostname/__init__.py,sha256=ywgKMtfHi1-DrXlzPfVAhzsLzzqcK7GT6eLgdode1Fg,688
+-urllib3/packages/ssl_match_hostname/_implementation.py,sha256=6dZ-q074g7XhsJ27MFCgkct8iVNZB3sMZvKhf-KUVy0,5679
+-urllib3/util/__init__.py,sha256=bWNaav_OT-1L7-sxm59cGb59rDORlbhb_4noduM5m0U,1038
+-urllib3/util/connection.py,sha256=NsxUAKQ98GKywta--zg57CdVpeTCI6N-GElCq78Dl8U,4637
+-urllib3/util/queue.py,sha256=myTX3JDHntglKQNBf3b6dasHH-uF-W59vzGSQiFdAfI,497
+-urllib3/util/request.py,sha256=C-6-AWffxZG03AdRGoY59uqsn4CVItKU6gjxz7Hc3Mc,3815
+-urllib3/util/response.py,sha256=_WbTQr8xRQuJuY2rTIZxVdJD6mnEOtQupjaK_bF_Vj8,2573
+-urllib3/util/retry.py,sha256=3wbv7SdzYNOxPcBiFkPCubTbK1_6vWSepznOXirhUfA,15543
+-urllib3/util/ssl_.py,sha256=R64MEN6Bh-YJq8b14kCb6hbV8L1p8oq4rcZiBow3tTQ,14511
+-urllib3/util/timeout.py,sha256=3qawUo-TZq4q7tyeRToMIOdNGEOBjOOQVq7nHnLryP4,9947
+-urllib3/util/url.py,sha256=jvkBGN64wo_Mx6Q6JYpFCGxamxbI2NdFoNQVTr7PUOM,13964
+-urllib3/util/wait.py,sha256=k46KzqIYu3Vnzla5YW3EvtInNlU_QycFqQAghIOxoAg,5406
+-urllib3-1.25.9.dist-info/LICENSE.txt,sha256=fA0TbuBYU4mt8tJWcbuZaHofdZKfRlt_Fu4_Ado3JV4,1115
+-urllib3-1.25.9.dist-info/METADATA,sha256=QVc-HCXpe7Dm_RDmd-GpzKT-LvxBgwsPsLEiE5kUjEI,39852
+-urllib3-1.25.9.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110
+-urllib3-1.25.9.dist-info/top_level.txt,sha256=EMiXL2sKrTcmrMxIHTqdc3ET54pQI2Y072LexFEemvo,8
+-urllib3-1.25.9.dist-info/RECORD,,
+diff --git a/third_party/python/urllib3/urllib3-1.25.9.dist-info/WHEEL b/third_party/python/urllib3/urllib3-1.25.9.dist-info/WHEEL
+deleted file mode 100644
+--- a/third_party/python/urllib3/urllib3-1.25.9.dist-info/WHEEL
++++ /dev/null
+@@ -1,6 +0,0 @@
+-Wheel-Version: 1.0
+-Generator: bdist_wheel (0.34.2)
+-Root-Is-Purelib: true
+-Tag: py2-none-any
+-Tag: py3-none-any
+-
+diff --git a/third_party/python/urllib3/urllib3-1.26.0.dist-info/LICENSE.txt b/third_party/python/urllib3/urllib3-1.26.0.dist-info/LICENSE.txt
+new file mode 100644
+--- /dev/null
++++ b/third_party/python/urllib3/urllib3-1.26.0.dist-info/LICENSE.txt
+@@ -0,0 +1,21 @@
++MIT License
++
++Copyright (c) 2008-2020 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
++
++Permission is hereby granted, free of charge, to any person obtaining a copy
++of this software and associated documentation files (the "Software"), to deal
++in the Software without restriction, including without limitation the rights
++to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
++copies of the Software, and to permit persons to whom the Software is
++furnished to do so, subject to the following conditions:
++
++The above copyright notice and this permission notice shall be included in all
++copies or substantial portions of the Software.
++
++THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
++IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
++FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
++AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
++LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
++OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
++SOFTWARE.
+diff --git a/third_party/python/urllib3/urllib3-1.26.0.dist-info/METADATA b/third_party/python/urllib3/urllib3-1.26.0.dist-info/METADATA
+new file mode 100644
+--- /dev/null
++++ b/third_party/python/urllib3/urllib3-1.26.0.dist-info/METADATA
+@@ -0,0 +1,1335 @@
++Metadata-Version: 2.1
++Name: urllib3
++Version: 1.26.0
++Summary: HTTP library with thread-safe connection pooling, file post, and more.
++Home-page: https://urllib3.readthedocs.io/
++Author: Andrey Petrov
++Author-email: andrey.petrov@shazow.net
++License: MIT
++Project-URL: Documentation, https://urllib3.readthedocs.io/
++Project-URL: Code, https://github.com/urllib3/urllib3
++Project-URL: Issue tracker, https://github.com/urllib3/urllib3/issues
++Keywords: urllib httplib threadsafe filepost http https ssl pooling
++Platform: UNKNOWN
++Classifier: Environment :: Web Environment
++Classifier: Intended Audience :: Developers
++Classifier: License :: OSI Approved :: MIT License
++Classifier: Operating System :: OS Independent
++Classifier: Programming Language :: Python
++Classifier: Programming Language :: Python :: 2
++Classifier: Programming Language :: Python :: 2.7
++Classifier: Programming Language :: Python :: 3
++Classifier: Programming Language :: Python :: 3.5
++Classifier: Programming Language :: Python :: 3.6
++Classifier: Programming Language :: Python :: 3.7
++Classifier: Programming Language :: Python :: 3.8
++Classifier: Programming Language :: Python :: 3.9
++Classifier: Programming Language :: Python :: Implementation :: CPython
++Classifier: Programming Language :: Python :: Implementation :: PyPy
++Classifier: Topic :: Internet :: WWW/HTTP
++Classifier: Topic :: Software Development :: Libraries
++Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4
++Description-Content-Type: text/x-rst
++Provides-Extra: brotli
++Requires-Dist: brotlipy (>=0.6.0) ; extra == 'brotli'
++Provides-Extra: secure
++Requires-Dist: pyOpenSSL (>=0.14) ; extra == 'secure'
++Requires-Dist: cryptography (>=1.3.4) ; extra == 'secure'
++Requires-Dist: idna (>=2.0.0) ; extra == 'secure'
++Requires-Dist: certifi ; extra == 'secure'
++Requires-Dist: ipaddress ; (python_version == "2.7") and extra == 'secure'
++Provides-Extra: socks
++Requires-Dist: PySocks (!=1.5.7,<2.0,>=1.5.6) ; extra == 'socks'
++
++
++urllib3 is a powerful, *user-friendly* HTTP client for Python. Much of the
++Python ecosystem already uses urllib3 and you should too.
++urllib3 brings many critical features that are missing from the Python
++standard libraries:
++
++- Thread safety.
++- Connection pooling.
++- Client-side SSL/TLS verification.
++- File uploads with multipart encoding.
++- Helpers for retrying requests and dealing with HTTP redirects.
++- Support for gzip, deflate, and brotli encoding.
++- Proxy support for HTTP and SOCKS.
++- 100% test coverage.
++
++urllib3 is powerful and easy to use:
++
++.. code-block:: python
++
++    >>> import urllib3
++    >>> http = urllib3.PoolManager()
++    >>> r = http.request('GET', 'http://httpbin.org/robots.txt')
++    >>> r.status
++    200
++    >>> r.data
++    'User-agent: *\nDisallow: /deny\n'
++
++
++Installing
++----------
++
++urllib3 can be installed with `pip <https://pip.pypa.io>`_::
++
++    $ python -m pip install urllib3
++
++Alternatively, you can grab the latest source code from `GitHub <https://github.com/urllib3/urllib3>`_::
++
++    $ git clone git://github.com/urllib3/urllib3.git
++    $ python setup.py install
++
++
++Documentation
++-------------
++
++urllib3 has usage and reference documentation at `urllib3.readthedocs.io <https://urllib3.readthedocs.io>`_.
++
++
++Contributing
++------------
++
++urllib3 happily accepts contributions. Please see our
++`contributing documentation <https://urllib3.readthedocs.io/en/latest/contributing.html>`_
++for some tips on getting started.
++
++
++Security Disclosures
++--------------------
++
++To report a security vulnerability, please use the
++`Tidelift security contact <https://tidelift.com/security>`_.
++Tidelift will coordinate the fix and disclosure with maintainers.
++
++
++Maintainers
++-----------
++
++- `@sethmlarson <https://github.com/sethmlarson>`__ (Seth M. Larson)
++- `@pquentin <https://github.com/pquentin>`__ (Quentin Pradet)
++- `@theacodes <https://github.com/theacodes>`__ (Thea Flowers)
++- `@haikuginger <https://github.com/haikuginger>`__ (Jess Shapiro)
++- `@lukasa <https://github.com/lukasa>`__ (Cory Benfield)
++- `@sigmavirus24 <https://github.com/sigmavirus24>`__ (Ian Stapleton Cordasco)
++- `@shazow <https://github.com/shazow>`__ (Andrey Petrov)
++
++👋
++
++
++Sponsorship
++-----------
++
++If your company benefits from this library, please consider `sponsoring its
++development <https://urllib3.readthedocs.io/en/latest/sponsors.html>`_.
++
++
++For Enterprise
++--------------
++
++.. |tideliftlogo| image:: https://nedbatchelder.com/pix/Tidelift_Logos_RGB_Tidelift_Shorthand_On-White_small.png
++   :width: 75
++   :alt: Tidelift
++
++.. list-table::
++   :widths: 10 100
++
++   * - |tideliftlogo|
++     - Professional support for urllib3 is available as part of the `Tidelift
++       Subscription`_.  Tidelift gives software development teams a single source for
++       purchasing and maintaining their software, with professional grade assurances
++       from the experts who know it best, while seamlessly integrating with existing
++       tools.
++
++.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-urllib3?utm_source=pypi-urllib3&utm_medium=referral&utm_campaign=readme
++
++
++Changes
++=======
++
++1.26.0 (2020-11-10)
++-------------------
++
++* **NOTE: urllib3 v2.0 will drop support for Python 2**.
++  `Read more in the v2.0 Roadmap <https://urllib3.readthedocs.io/en/latest/v2-roadmap.html>`_.
++
++* Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806)
++
++* Deprecated negotiating TLSv1 and TLSv1.1 by default. Users that
++  still wish to use TLS earlier than 1.2 without a deprecation warning
++  should opt-in explicitly by setting ``ssl_version=ssl.PROTOCOL_TLSv1_1`` (Pull #2002)
++  **Starting in urllib3 v2.0: Connections that receive a ``DeprecationWarning`` will fail**
++
++* Deprecated ``Retry`` options ``Retry.DEFAULT_METHOD_WHITELIST``, ``Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST``
++  and ``Retry(method_whitelist=...)`` in favor of ``Retry.DEFAULT_ALLOWED_METHODS``,
++  ``Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT``, and ``Retry(allowed_methods=...)``
++  (Pull #2000) **Starting in urllib3 v2.0: Deprecated options will be removed**
++
++* Added default ``User-Agent`` header to every request (Pull #1750)
++
++* Added ``urllib3.util.SKIP_HEADER`` for skipping ``User-Agent``, ``Accept-Encoding``, 
++  and ``Host`` headers from being automatically emitted with requests (Pull #2018)
++
++* Collapse ``transfer-encoding: chunked`` request data and framing into
++  the same ``socket.send()`` call (Pull #1906)
++
++* Send ``http/1.1`` ALPN identifier with every TLS handshake by default (Pull #1894)
++
++* Properly terminate SecureTransport connections when CA verification fails (Pull #1977)
++
++* Don't emit an ``SNIMissingWarning`` when passing ``server_hostname=None``
++  to SecureTransport (Pull #1903)
++
++* Disabled requesting TLSv1.2 session tickets as they weren't being used by urllib3 (Pull #1970)
++
++* Suppress ``BrokenPipeError`` when writing request body after the server
++  has closed the socket (Pull #1524)
++
++* Wrap ``ssl.SSLError`` that can be raised from reading a socket (e.g. "bad MAC")
++  into an ``urllib3.exceptions.SSLError`` (Pull #1939)
++
++
++1.25.11 (2020-10-19)
++--------------------
++
++* Fix retry backoff time parsed from ``Retry-After`` header when given
++  in the HTTP date format. The HTTP date was parsed as the local timezone
++  rather than accounting for the timezone in the HTTP date (typically
++  UTC) (Pull #1932, Pull #1935, Pull #1938, Pull #1949)
++
++* Fix issue where an error would be raised when the ``SSLKEYLOGFILE``
++  environment variable was set to the empty string. Now ``SSLContext.keylog_file``
++  is not set in this situation (Pull #2016)
++
++
++1.25.10 (2020-07-22)
++--------------------
++
++* Added support for ``SSLKEYLOGFILE`` environment variable for
++  logging TLS session keys with use with programs like
++  Wireshark for decrypting captured web traffic (Pull #1867)
++
++* Fixed loading of SecureTransport libraries on macOS Big Sur
++  due to the new dynamic linker cache (Pull #1905)
++
++* Collapse chunked request bodies data and framing into one
++  call to ``send()`` to reduce the number of TCP packets by 2-4x (Pull #1906)
++
++* Don't insert ``None`` into ``ConnectionPool`` if the pool
++  was empty when requesting a connection (Pull #1866)
++
++* Avoid ``hasattr`` call in ``BrotliDecoder.decompress()`` (Pull #1858)
++
++
++1.25.9 (2020-04-16)
++-------------------
++
++* Added ``InvalidProxyConfigurationWarning`` which is raised when
++  erroneously specifying an HTTPS proxy URL. urllib3 doesn't currently
++  support connecting to HTTPS proxies but will soon be able to
++  and we would like users to migrate properly without much breakage.
++
++  See `this GitHub issue <https://github.com/urllib3/urllib3/issues/1850>`_
++  for more information on how to fix your proxy config. (Pull #1851)
++
++* Drain connection after ``PoolManager`` redirect (Pull #1817)
++
++* Ensure ``load_verify_locations`` raises ``SSLError`` for all backends (Pull #1812)
++
++* Rename ``VerifiedHTTPSConnection`` to ``HTTPSConnection`` (Pull #1805)
++
++* Allow the CA certificate data to be passed as a string (Pull #1804)
++
++* Raise ``ValueError`` if method contains control characters (Pull #1800)
++
++* Add ``__repr__`` to ``Timeout`` (Pull #1795)
++
++
++1.25.8 (2020-01-20)
++-------------------
++
++* Drop support for EOL Python 3.4 (Pull #1774)
++
++* Optimize _encode_invalid_chars (Pull #1787)
++
++
++1.25.7 (2019-11-11)
++-------------------
++
++* Preserve ``chunked`` parameter on retries (Pull #1715, Pull #1734)
++
++* Allow unset ``SERVER_SOFTWARE`` in App Engine (Pull #1704, Issue #1470)
++
++* Fix issue where URL fragment was sent within the request target. (Pull #1732)
++
++* Fix issue where an empty query section in a URL would fail to parse. (Pull #1732)
++
++* Remove TLS 1.3 support in SecureTransport due to Apple removing support (Pull #1703)
++
++
++1.25.6 (2019-09-24)
++-------------------
++
++* Fix issue where tilde (``~``) characters were incorrectly
++  percent-encoded in the path. (Pull #1692)
++
++
++1.25.5 (2019-09-19)
++-------------------
++
++* Add mitigation for BPO-37428 affecting Python <3.7.4 and OpenSSL 1.1.1+ which
++  caused certificate verification to be enabled when using ``cert_reqs=CERT_NONE``.
++  (Issue #1682)
++
++
++1.25.4 (2019-09-19)
++-------------------
++
++* Propagate Retry-After header settings to subsequent retries. (Pull #1607)
++
++* Fix edge case where Retry-After header was still respected even when
++  explicitly opted out of. (Pull #1607)
++
++* Remove dependency on ``rfc3986`` for URL parsing.
++
++* Fix issue where URLs containing invalid characters within ``Url.auth`` would
++  raise an exception instead of percent-encoding those characters.
++
++* Add support for ``HTTPResponse.auto_close = False`` which makes HTTP responses
++  work well with BufferedReaders and other ``io`` module features. (Pull #1652)
++
++* Percent-encode invalid characters in URL for ``HTTPConnectionPool.request()`` (Pull #1673)
++
++
++1.25.3 (2019-05-23)
++-------------------
++
++* Change ``HTTPSConnection`` to load system CA certificates
++  when ``ca_certs``, ``ca_cert_dir``, and ``ssl_context`` are
++  unspecified. (Pull #1608, Issue #1603)
++
++* Upgrade bundled rfc3986 to v1.3.2. (Pull #1609, Issue #1605)
++
++
++1.25.2 (2019-04-28)
++-------------------
++
++* Change ``is_ipaddress`` to not detect IPvFuture addresses. (Pull #1583)
++
++* Change ``parse_url`` to percent-encode invalid characters within the
++  path, query, and target components. (Pull #1586)
++
++
++1.25.1 (2019-04-24)
++-------------------
++
++* Add support for Google's ``Brotli`` package. (Pull #1572, Pull #1579)
++
++* Upgrade bundled rfc3986 to v1.3.1 (Pull #1578)
++
++
++1.25 (2019-04-22)
++-----------------
++
++* Require and validate certificates by default when using HTTPS (Pull #1507)
++
++* Upgraded ``urllib3.utils.parse_url()`` to be RFC 3986 compliant. (Pull #1487)
++
++* Added support for ``key_password`` for ``HTTPSConnectionPool`` to use
++  encrypted ``key_file`` without creating your own ``SSLContext`` object. (Pull #1489)
++
++* Add TLSv1.3 support to CPython, pyOpenSSL, and SecureTransport ``SSLContext``
++  implementations. (Pull #1496)
++
++* Switched the default multipart header encoder from RFC 2231 to HTML 5 working draft. (Issue #303, Pull #1492)
++
++* Fixed issue where OpenSSL would block if an encrypted client private key was
++  given and no password was given. Instead an ``SSLError`` is raised. (Pull #1489)
++
++* Added support for Brotli content encoding. It is enabled automatically if
++  ``brotlipy`` package is installed which can be requested with
++  ``urllib3[brotli]`` extra. (Pull #1532)
++
++* Drop ciphers using DSS key exchange from default TLS cipher suites.
++  Improve default ciphers when using SecureTransport. (Pull #1496)
++
++* Implemented a more efficient ``HTTPResponse.__iter__()`` method. (Issue #1483)
++
++1.24.3 (2019-05-01)
++-------------------
++
++* Apply fix for CVE-2019-9740. (Pull #1591)
++
++1.24.2 (2019-04-17)
++-------------------
++
++* Don't load system certificates by default when any other ``ca_certs``, ``ca_certs_dir`` or
++  ``ssl_context`` parameters are specified.
++
++* Remove Authorization header regardless of case when redirecting to cross-site. (Issue #1510)
++
++* Add support for IPv6 addresses in subjectAltName section of certificates. (Issue #1269)
++
++
++1.24.1 (2018-11-02)
++-------------------
++
++* Remove quadratic behavior within ``GzipDecoder.decompress()`` (Issue #1467)
++
++* Restored functionality of ``ciphers`` parameter for ``create_urllib3_context()``. (Issue #1462)
++
++
++1.24 (2018-10-16)
++-----------------
++
++* Allow key_server_hostname to be specified when initializing a PoolManager to allow custom SNI to be overridden. (Pull #1449)
++
++* Test against Python 3.7 on AppVeyor. (Pull #1453)
++
++* Early-out ipv6 checks when running on App Engine. (Pull #1450)
++
++* Change ambiguous description of backoff_factor (Pull #1436)
++
++* Add ability to handle multiple Content-Encodings (Issue #1441 and Pull #1442)
++
++* Skip DNS names that can't be idna-decoded when using pyOpenSSL (Issue #1405).
++
++* Add a server_hostname parameter to HTTPSConnection which allows for
++  overriding the SNI hostname sent in the handshake. (Pull #1397)
++
++* Drop support for EOL Python 2.6 (Pull #1429 and Pull #1430)
++
++* Fixed bug where responses with header Content-Type: message/* erroneously
++  raised HeaderParsingError, resulting in a warning being logged. (Pull #1439)
++
++* Move urllib3 to src/urllib3 (Pull #1409)
++
++
++1.23 (2018-06-04)
++-----------------
++
++* Allow providing a list of headers to strip from requests when redirecting
++  to a different host. Defaults to the ``Authorization`` header. Different
++  headers can be set via ``Retry.remove_headers_on_redirect``. (Issue #1316)
++
++* Fix ``util.selectors._fileobj_to_fd`` to accept ``long`` (Issue #1247).
++
++* Dropped Python 3.3 support. (Pull #1242)
++
++* Put the connection back in the pool when calling stream() or read_chunked() on
++  a chunked HEAD response. (Issue #1234)
++
++* Fixed pyOpenSSL-specific ssl client authentication issue when clients
++  attempted to auth via certificate + chain (Issue #1060)
++
++* Add the port to the connectionpool connect print (Pull #1251)
++
++* Don't use the ``uuid`` module to create multipart data boundaries. (Pull #1380)
++
++* ``read_chunked()`` on a closed response returns no chunks. (Issue #1088)
++
++* Add Python 2.6 support to ``contrib.securetransport`` (Pull #1359)
++
++* Added support for auth info in url for SOCKS proxy (Pull #1363)
++
++
++1.22 (2017-07-20)
++-----------------
++
++* Fixed missing brackets in ``HTTP CONNECT`` when connecting to IPv6 address via
++  IPv6 proxy. (Issue #1222)
++
++* Made the connection pool retry on ``SSLError``.  The original ``SSLError``
++  is available on ``MaxRetryError.reason``. (Issue #1112)
++
++* Drain and release connection before recursing on retry/redirect.  Fixes
++  deadlocks with a blocking connectionpool. (Issue #1167)
++
++* Fixed compatibility for cookiejar. (Issue #1229)
++
++* pyopenssl: Use vendored version of ``six``. (Issue #1231)
++
++
++1.21.1 (2017-05-02)
++-------------------
++
++* Fixed SecureTransport issue that would cause long delays in response body
++  delivery. (Pull #1154)
++
++* Fixed regression in 1.21 that threw exceptions when users passed the
++  ``socket_options`` flag to the ``PoolManager``.  (Issue #1165)
++
++* Fixed regression in 1.21 that threw exceptions when users passed the
++  ``assert_hostname`` or ``assert_fingerprint`` flag to the ``PoolManager``.
++  (Pull #1157)
++
++
++1.21 (2017-04-25)
++-----------------
++
++* Improved performance of certain selector system calls on Python 3.5 and
++  later. (Pull #1095)
++
++* Resolved issue where the PyOpenSSL backend would not wrap SysCallError
++  exceptions appropriately when sending data. (Pull #1125)
++
++* Selectors now detects a monkey-patched select module after import for modules
++  that patch the select module like eventlet, greenlet. (Pull #1128)
++
++* Reduced memory consumption when streaming zlib-compressed responses
++  (as opposed to raw deflate streams). (Pull #1129)
++
++* Connection pools now use the entire request context when constructing the
++  pool key. (Pull #1016)
++
++* ``PoolManager.connection_from_*`` methods now accept a new keyword argument,
++  ``pool_kwargs``, which are merged with the existing ``connection_pool_kw``.
++  (Pull #1016)
++
++* Add retry counter for ``status_forcelist``. (Issue #1147)
++
++* Added ``contrib`` module for using SecureTransport on macOS:
++  ``urllib3.contrib.securetransport``.  (Pull #1122)
++
++* urllib3 now only normalizes the case of ``http://`` and ``https://`` schemes:
++  for schemes it does not recognise, it assumes they are case-sensitive and
++  leaves them unchanged.
++  (Issue #1080)
++
++
++1.20 (2017-01-19)
++-----------------
++
++* Added support for waiting for I/O using selectors other than select,
++  improving urllib3's behaviour with large numbers of concurrent connections.
++  (Pull #1001)
++
++* Updated the date for the system clock check. (Issue #1005)
++
++* ConnectionPools now correctly consider hostnames to be case-insensitive.
++  (Issue #1032)
++
++* Outdated versions of PyOpenSSL now cause the PyOpenSSL contrib module
++  to fail when it is injected, rather than at first use. (Pull #1063)
++
++* Outdated versions of cryptography now cause the PyOpenSSL contrib module
++  to fail when it is injected, rather than at first use. (Issue #1044)
++
++* Automatically attempt to rewind a file-like body object when a request is
++  retried or redirected. (Pull #1039)
++
++* Fix some bugs that occur when modules incautiously patch the queue module.
++  (Pull #1061)
++
++* Prevent retries from occurring on read timeouts for which the request method
++  was not in the method whitelist. (Issue #1059)
++
++* Changed the PyOpenSSL contrib module to lazily load idna to avoid
++  unnecessarily bloating the memory of programs that don't need it. (Pull
++  #1076)
++
++* Add support for IPv6 literals with zone identifiers. (Pull #1013)
++
++* Added support for socks5h:// and socks4a:// schemes when working with SOCKS
++  proxies, and controlled remote DNS appropriately. (Issue #1035)
++
++
++1.19.1 (2016-11-16)
++-------------------
++
++* Fixed AppEngine import that didn't function on Python 3.5. (Pull #1025)
++
++
++1.19 (2016-11-03)
++-----------------
++
++* urllib3 now respects Retry-After headers on 413, 429, and 503 responses when
++  using the default retry logic. (Pull #955)
++
++* Remove markers from setup.py to assist ancient setuptools versions. (Issue
++  #986)
++
++* Disallow superscripts and other integerish things in URL ports. (Issue #989)
++
++* Allow urllib3's HTTPResponse.stream() method to continue to work with
++  non-httplib underlying FPs. (Pull #990)
++
++* Empty filenames in multipart headers are now emitted as such, rather than
++  being suppressed. (Issue #1015)
++
++* Prefer user-supplied Host headers on chunked uploads. (Issue #1009)
++
++
++1.18.1 (2016-10-27)
++-------------------
++
++* CVE-2016-9015. Users who are using urllib3 version 1.17 or 1.18 along with
++  PyOpenSSL injection and OpenSSL 1.1.0 *must* upgrade to this version. This
++  release fixes a vulnerability whereby urllib3 in the above configuration
++  would silently fail to validate TLS certificates due to erroneously setting
++  invalid flags in OpenSSL's ``SSL_CTX_set_verify`` function. These erroneous
++  flags do not cause a problem in OpenSSL versions before 1.1.0, which
++  interprets the presence of any flag as requesting certificate validation.
++
++  There is no PR for this patch, as it was prepared for simultaneous disclosure
++  and release. The master branch received the same fix in Pull #1010.
++
++
++1.18 (2016-09-26)
++-----------------
++
++* Fixed incorrect message for IncompleteRead exception. (Pull #973)
++
++* Accept ``iPAddress`` subject alternative name fields in TLS certificates.
++  (Issue #258)
++
++* Fixed consistency of ``HTTPResponse.closed`` between Python 2 and 3.
++  (Issue #977)
++
++* Fixed handling of wildcard certificates when using PyOpenSSL. (Issue #979)
++
++
++1.17 (2016-09-06)
++-----------------
++
++* Accept ``SSLContext`` objects for use in SSL/TLS negotiation. (Issue #835)
++
++* ConnectionPool debug log now includes scheme, host, and port. (Issue #897)
++
++* Substantially refactored documentation. (Issue #887)
++
++* Used URLFetch default timeout on AppEngine, rather than hardcoding our own.
++  (Issue #858)
++
++* Normalize the scheme and host in the URL parser (Issue #833)
++
++* ``HTTPResponse`` contains the last ``Retry`` object, which now also
++  contains retries history. (Issue #848)
++
++* Timeout can no longer be set as boolean, and must be greater than zero.
++  (Pull #924)
++
++* Removed pyasn1 and ndg-httpsclient from dependencies used for PyOpenSSL. We
++  now use cryptography and idna, both of which are already dependencies of
++  PyOpenSSL. (Pull #930)
++
++* Fixed infinite loop in ``stream`` when amt=None. (Issue #928)
++
++* Try to use the operating system's certificates when we are using an
++  ``SSLContext``. (Pull #941)
++
++* Updated cipher suite list to allow ChaCha20+Poly1305. AES-GCM is preferred to
++  ChaCha20, but ChaCha20 is then preferred to everything else. (Pull #947)
++
++* Updated cipher suite list to remove 3DES-based cipher suites. (Pull #958)
++
++* Removed the cipher suite fallback to allow HIGH ciphers. (Pull #958)
++
++* Implemented ``length_remaining`` to determine remaining content
++  to be read. (Pull #949)
++
++* Implemented ``enforce_content_length`` to enable exceptions when
++  incomplete data chunks are received. (Pull #949)
++
++* Dropped connection start, dropped connection reset, redirect, forced retry,
++  and new HTTPS connection log levels to DEBUG, from INFO. (Pull #967)
++
++
++1.16 (2016-06-11)
++-----------------
++
++* Disable IPv6 DNS when IPv6 connections are not possible. (Issue #840)
++
++* Provide ``key_fn_by_scheme`` pool keying mechanism that can be
++  overridden. (Issue #830)
++
++* Normalize scheme and host to lowercase for pool keys, and include
++  ``source_address``. (Issue #830)
++
++* Cleaner exception chain in Python 3 for ``_make_request``.
++  (Issue #861)
++
++* Fixed installing ``urllib3[socks]`` extra. (Issue #864)
++
++* Fixed signature of ``ConnectionPool.close`` so it can actually safely be
++  called by subclasses. (Issue #873)
++
++* Retain ``release_conn`` state across retries. (Issues #651, #866)
++
++* Add customizable ``HTTPConnectionPool.ResponseCls``, which defaults to
++  ``HTTPResponse`` but can be replaced with a subclass. (Issue #879)
++
++
++1.15.1 (2016-04-11)
++-------------------
++
++* Fix packaging to include backports module. (Issue #841)
++
++
++1.15 (2016-04-06)
++-----------------
++
++* Added Retry(raise_on_status=False). (Issue #720)
++
++* Always use setuptools, no more distutils fallback. (Issue #785)
++
++* Dropped support for Python 3.2. (Issue #786)
++
++* Chunked transfer encoding when requesting with ``chunked=True``.
++  (Issue #790)
++
++* Fixed regression with IPv6 port parsing. (Issue #801)
++
++* Append SNIMissingWarning messages to allow users to specify it in
++  the PYTHONWARNINGS environment variable. (Issue #816)
++
++* Handle unicode headers in Py2. (Issue #818)
++
++* Log certificate when there is a hostname mismatch. (Issue #820)
++
++* Preserve order of request/response headers. (Issue #821)
++
++
++1.14 (2015-12-29)
++-----------------
++
++* contrib: SOCKS proxy support! (Issue #762)
++
++* Fixed AppEngine handling of transfer-encoding header and bug
++  in Timeout defaults checking. (Issue #763)
++
++
++1.13.1 (2015-12-18)
++-------------------
++
++* Fixed regression in IPv6 + SSL for match_hostname. (Issue #761)
++
++
++1.13 (2015-12-14)
++-----------------
++
++* Fixed ``pip install urllib3[secure]`` on modern pip. (Issue #706)
++
++* pyopenssl: Fixed SSL3_WRITE_PENDING error. (Issue #717)
++
++* pyopenssl: Support for TLSv1.1 and TLSv1.2. (Issue #696)
++
++* Close connections more defensively on exception. (Issue #734)
++
++* Adjusted ``read_chunked`` to handle gzipped, chunk-encoded bodies without
++  repeatedly flushing the decoder, to function better on Jython. (Issue #743)
++
++* Accept ``ca_cert_dir`` for SSL-related PoolManager configuration. (Issue #758)
++
++
++1.12 (2015-09-03)
++-----------------
++
++* Rely on ``six`` for importing ``httplib`` to work around
++  conflicts with other Python 3 shims. (Issue #688)
++
++* Add support for directories of certificate authorities, as supported by
++  OpenSSL. (Issue #701)
++
++* New exception: ``NewConnectionError``, raised when we fail to establish
++  a new connection, usually ``ECONNREFUSED`` socket error.
++
++
++1.11 (2015-07-21)
++-----------------
++
++* When ``ca_certs`` is given, ``cert_reqs`` defaults to
++  ``'CERT_REQUIRED'``. (Issue #650)
++
++* ``pip install urllib3[secure]`` will install Certifi and
++  PyOpenSSL as dependencies. (Issue #678)
++
++* Made ``HTTPHeaderDict`` usable as a ``headers`` input value
++  (Issues #632, #679)
++
++* Added `urllib3.contrib.appengine <https://urllib3.readthedocs.io/en/latest/contrib.html#google-app-engine>`_
++  which has an ``AppEngineManager`` for using ``URLFetch`` in a
++  Google AppEngine environment. (Issue #664)
++
++* Dev: Added test suite for AppEngine. (Issue #631)
++
++* Fix performance regression when using PyOpenSSL. (Issue #626)
++
++* Passing incorrect scheme (e.g. ``foo://``) will raise
++  ``ValueError`` instead of ``AssertionError`` (backwards
++  compatible for now, but please migrate). (Issue #640)
++
++* Fix pools not getting replenished when an error occurs during a
++  request using ``release_conn=False``. (Issue #644)
++
++* Fix pool-default headers not applying for url-encoded requests
++  like GET. (Issue #657)
++
++* log.warning in Python 3 when headers are skipped due to parsing
++  errors. (Issue #642)
++
++* Close and discard connections if an error occurs during read.
++  (Issue #660)
++
++* Fix host parsing for IPv6 proxies. (Issue #668)
++
++* Separate warning type SubjectAltNameWarning, now issued once
++  per host. (Issue #671)
++
++* Fix ``httplib.IncompleteRead`` not getting converted to
++  ``ProtocolError`` when using ``HTTPResponse.stream()``
++  (Issue #674)
++
++1.10.4 (2015-05-03)
++-------------------
++
++* Migrate tests to Tornado 4. (Issue #594)
++
++* Append default warning configuration rather than overwrite.
++  (Issue #603)
++
++* Fix streaming decoding regression. (Issue #595)
++
++* Fix chunked requests losing state across keep-alive connections.
++  (Issue #599)
++
++* Fix hanging when chunked HEAD response has no body. (Issue #605)
++
++
++1.10.3 (2015-04-21)
++-------------------
++
++* Emit ``InsecurePlatformWarning`` when SSLContext object is missing.
++  (Issue #558)
++
++* Fix regression of duplicate header keys being discarded.
++  (Issue #563)
++
++* ``Response.stream()`` returns a generator for chunked responses.
++  (Issue #560)
++
++* Set upper-bound timeout when waiting for a socket in PyOpenSSL.
++  (Issue #585)
++
++* Work on platforms without `ssl` module for plain HTTP requests.
++  (Issue #587)
++
++* Stop relying on the stdlib's default cipher list. (Issue #588)
++
++
++1.10.2 (2015-02-25)
++-------------------
++
++* Fix file descriptor leakage on retries. (Issue #548)
++
++* Removed RC4 from default cipher list. (Issue #551)
++
++* Header performance improvements. (Issue #544)
++
++* Fix PoolManager not obeying redirect retry settings. (Issue #553)
++
++
++1.10.1 (2015-02-10)
++-------------------
++
++* Pools can be used as context managers. (Issue #545)
++
++* Don't re-use connections which experienced an SSLError. (Issue #529)
++
++* Don't fail when gzip decoding an empty stream. (Issue #535)
++
++* Add sha256 support for fingerprint verification. (Issue #540)
++
++* Fixed handling of header values containing commas. (Issue #533)
++
++
++1.10 (2014-12-14)
++-----------------
++
++* Disabled SSLv3. (Issue #473)
++
++* Add ``Url.url`` property to return the composed url string. (Issue #394)
++
++* Fixed PyOpenSSL + gevent ``WantWriteError``. (Issue #412)
++
++* ``MaxRetryError.reason`` will always be an exception, not string.
++  (Issue #481)
++
++* Fixed SSL-related timeouts not being detected as timeouts. (Issue #492)
++
++* Py3: Use ``ssl.create_default_context()`` when available. (Issue #473)
++
++* Emit ``InsecureRequestWarning`` for *every* insecure HTTPS request.
++  (Issue #496)
++
++* Emit ``SecurityWarning`` when certificate has no ``subjectAltName``.
++  (Issue #499)
++
++* Close and discard sockets which experienced SSL-related errors.
++  (Issue #501)
++
++* Handle ``body`` param in ``.request(...)``. (Issue #513)
++
++* Respect timeout with HTTPS proxy. (Issue #505)
++
++* PyOpenSSL: Handle ZeroReturnError exception. (Issue #520)
++
++
++1.9.1 (2014-09-13)
++------------------
++
++* Apply socket arguments before binding. (Issue #427)
++
++* More careful checks if fp-like object is closed. (Issue #435)
++
++* Fixed packaging issues of some development-related files not
++  getting included. (Issue #440)
++
++* Allow performing *only* fingerprint verification. (Issue #444)
++
++* Emit ``SecurityWarning`` if system clock is waaay off. (Issue #445)
++
++* Fixed PyOpenSSL compatibility with PyPy. (Issue #450)
++
++* Fixed ``BrokenPipeError`` and ``ConnectionError`` handling in Py3.
++  (Issue #443)
++
++
++
++1.9 (2014-07-04)
++----------------
++
++* Shuffled around development-related files. If you're maintaining a distro
++  package of urllib3, you may need to tweak things. (Issue #415)
++
++* Unverified HTTPS requests will trigger a warning on the first request. See
++  our new `security documentation
++  <https://urllib3.readthedocs.io/en/latest/security.html>`_ for details.
++  (Issue #426)
++
++* New retry logic and ``urllib3.util.retry.Retry`` configuration object.
++  (Issue #326)
++
++* All raised exceptions should now be wrapped in a
++  ``urllib3.exceptions.HTTPException``-extending exception. (Issue #326)
++
++* All errors during a retry-enabled request should be wrapped in
++  ``urllib3.exceptions.MaxRetryError``, including timeout-related exceptions
++  which were previously exempt. Underlying error is accessible from the
++  ``.reason`` property. (Issue #326)
++
++* ``urllib3.exceptions.ConnectionError`` renamed to
++  ``urllib3.exceptions.ProtocolError``. (Issue #326)
++
++* Errors during response read (such as IncompleteRead) are now wrapped in
++  ``urllib3.exceptions.ProtocolError``. (Issue #418)
++
++* Requesting an empty host will raise ``urllib3.exceptions.LocationValueError``.
++  (Issue #417)
++
++* Catch read timeouts over SSL connections as
++  ``urllib3.exceptions.ReadTimeoutError``. (Issue #419)
++
++* Apply socket arguments before connecting. (Issue #427)
++
++
++1.8.3 (2014-06-23)
++------------------
++
++* Fix TLS verification when using a proxy in Python 3.4.1. (Issue #385)
++
++* Add ``disable_cache`` option to ``urllib3.util.make_headers``. (Issue #393)
++
++* Wrap ``socket.timeout`` exception with
++  ``urllib3.exceptions.ReadTimeoutError``. (Issue #399)
++
++* Fixed proxy-related bug where connections were being reused incorrectly.
++  (Issues #366, #369)
++
++* Added ``socket_options`` keyword parameter which allows to define
++  ``setsockopt`` configuration of new sockets. (Issue #397)
++
++* Removed ``HTTPConnection.tcp_nodelay`` in favor of
++  ``HTTPConnection.default_socket_options``. (Issue #397)
++
++* Fixed ``TypeError`` bug in Python 2.6.4. (Issue #411)
++
++
++1.8.2 (2014-04-17)
++------------------
++
++* Fix ``urllib3.util`` not being included in the package.
++
++
++1.8.1 (2014-04-17)
++------------------
++
++* Fix AppEngine bug of HTTPS requests going out as HTTP. (Issue #356)
++
++* Don't install ``dummyserver`` into ``site-packages`` as it's only needed
++  for the test suite. (Issue #362)
++
++* Added support for specifying ``source_address``. (Issue #352)
++
++
++1.8 (2014-03-04)
++----------------
++
++* Improved url parsing in ``urllib3.util.parse_url`` (properly parse '@' in
++  username, and blank ports like 'hostname:').
++
++* New ``urllib3.connection`` module which contains all the HTTPConnection
++  objects.
++
++* Several ``urllib3.util.Timeout``-related fixes. Also changed constructor
++  signature to a more sensible order. [Backwards incompatible]
++  (Issues #252, #262, #263)
++
++* Use ``backports.ssl_match_hostname`` if it's installed. (Issue #274)
++
++* Added ``.tell()`` method to ``urllib3.response.HTTPResponse`` which
++  returns the number of bytes read so far. (Issue #277)
++
++* Support for platforms without threading. (Issue #289)
++
++* Expand default-port comparison in ``HTTPConnectionPool.is_same_host``
++  to allow a pool with no specified port to be considered equal to an
++  HTTP/HTTPS url with port 80/443 explicitly provided. (Issue #305)
++
++* Improved default SSL/TLS settings to avoid vulnerabilities.
++  (Issue #309)
++
++* Fixed ``urllib3.poolmanager.ProxyManager`` not retrying on connect errors.
++  (Issue #310)
++
++* Disable Nagle's Algorithm on the socket for non-proxies. A subset of requests
++  will send the entire HTTP request ~200 milliseconds faster; however, some of
++  the resulting TCP packets will be smaller. (Issue #254)
++
++* Increased maximum number of SubjectAltNames in ``urllib3.contrib.pyopenssl``
++  from the default 64 to 1024 in a single certificate. (Issue #318)
++
++* Headers are now passed and stored as a custom
++  ``urllib3.collections_.HTTPHeaderDict`` object rather than a plain ``dict``.
++  (Issue #329, #333)
++
++* Headers no longer lose their case on Python 3. (Issue #236)
++
++* ``urllib3.contrib.pyopenssl`` now uses the operating system's default CA
++  certificates on inject. (Issue #332)
++
++* Requests with ``retries=False`` will immediately raise any exceptions without
++  wrapping them in ``MaxRetryError``. (Issue #348)
++
++* Fixed open socket leak with SSL-related failures. (Issue #344, #348)
++
++
++1.7.1 (2013-09-25)
++------------------
++
++* Added granular timeout support with new ``urllib3.util.Timeout`` class.
++  (Issue #231)
++
++* Fixed Python 3.4 support. (Issue #238)
++
++
++1.7 (2013-08-14)
++----------------
++
++* More exceptions are now pickle-able, with tests. (Issue #174)
++
++* Fixed redirecting with relative URLs in Location header. (Issue #178)
++
++* Support for relative urls in ``Location: ...`` header. (Issue #179)
++
++* ``urllib3.response.HTTPResponse`` now inherits from ``io.IOBase`` for bonus
++  file-like functionality. (Issue #187)
++
++* Passing ``assert_hostname=False`` when creating a HTTPSConnectionPool will
++  skip hostname verification for SSL connections. (Issue #194)
++
++* New method ``urllib3.response.HTTPResponse.stream(...)`` which acts as a
++  generator wrapped around ``.read(...)``. (Issue #198)
++
++* IPv6 url parsing enforces brackets around the hostname. (Issue #199)
++
++* Fixed thread race condition in
++  ``urllib3.poolmanager.PoolManager.connection_from_host(...)`` (Issue #204)
++
++* ``ProxyManager`` requests now include non-default port in ``Host: ...``
++  header. (Issue #217)
++
++* Added HTTPS proxy support in ``ProxyManager``. (Issue #170 #139)
++
++* New ``RequestField`` object can be passed to the ``fields=...`` param which
++  can specify headers. (Issue #220)
++
++* Raise ``urllib3.exceptions.ProxyError`` when connecting to proxy fails.
++  (Issue #221)
++
++* Use international headers when posting file names. (Issue #119)
++
++* Improved IPv6 support. (Issue #203)
++
++
++1.6 (2013-04-25)
++----------------
++
++* Contrib: Optional SNI support for Py2 using PyOpenSSL. (Issue #156)
++
++* ``ProxyManager`` automatically adds ``Host: ...`` header if not given.
++
++* Improved SSL-related code. ``cert_req`` now optionally takes a string like
++  "REQUIRED" or "NONE". Same with ``ssl_version`` takes strings like "SSLv23"
++  The string values reflect the suffix of the respective constant variable.
++  (Issue #130)
++
++* Vendored ``socksipy`` now based on Anorov's fork which handles unexpectedly
++  closed proxy connections and larger read buffers. (Issue #135)
++
++* Ensure the connection is closed if no data is received, fixes connection leak
++  on some platforms. (Issue #133)
++
++* Added SNI support for SSL/TLS connections on Py32+. (Issue #89)
++
++* Tests fixed to be compatible with Py26 again. (Issue #125)
++
++* Added ability to choose SSL version by passing an ``ssl.PROTOCOL_*`` constant
++  to the ``ssl_version`` parameter of ``HTTPSConnectionPool``. (Issue #109)
++
++* Allow an explicit content type to be specified when encoding file fields.
++  (Issue #126)
++
++* Exceptions are now pickleable, with tests. (Issue #101)
++
++* Fixed default headers not getting passed in some cases. (Issue #99)
++
++* Treat "content-encoding" header value as case-insensitive, per RFC 2616
++  Section 3.5. (Issue #110)
++
++* "Connection Refused" SocketErrors will get retried rather than raised.
++  (Issue #92)
++
++* Updated vendored ``six``, no longer overrides the global ``six`` module
++  namespace. (Issue #113)
++
++* ``urllib3.exceptions.MaxRetryError`` contains a ``reason`` property holding
++  the exception that prompted the final retry. If ``reason is None`` then it
++  was due to a redirect. (Issue #92, #114)
++
++* Fixed ``PoolManager.urlopen()`` from not redirecting more than once.
++  (Issue #149)
++
++* Don't assume ``Content-Type: text/plain`` for multi-part encoding parameters
++  that are not files. (Issue #111)
++
++* Pass `strict` param down to ``httplib.HTTPConnection``. (Issue #122)
++
++* Added mechanism to verify SSL certificates by fingerprint (md5, sha1) or
++  against an arbitrary hostname (when connecting by IP or for misconfigured
++  servers). (Issue #140)
++
++* Streaming decompression support. (Issue #159)
++
++
++1.5 (2012-08-02)
++----------------
++
++* Added ``urllib3.add_stderr_logger()`` for quickly enabling STDERR debug
++  logging in urllib3.
++
++* Native full URL parsing (including auth, path, query, fragment) available in
++  ``urllib3.util.parse_url(url)``.
++
++* Built-in redirect will switch method to 'GET' if status code is 303.
++  (Issue #11)
++
++* ``urllib3.PoolManager`` strips the scheme and host before sending the request
++  uri. (Issue #8)
++
++* New ``urllib3.exceptions.DecodeError`` exception for when automatic decoding,
++  based on the Content-Type header, fails.
++
++* Fixed bug with pool depletion and leaking connections (Issue #76). Added
++  explicit connection closing on pool eviction. Added
++  ``urllib3.PoolManager.clear()``.
++
++* 99% -> 100% unit test coverage.
++
++
++1.4 (2012-06-16)
++----------------
++
++* Minor AppEngine-related fixes.
++
++* Switched from ``mimetools.choose_boundary`` to ``uuid.uuid4()``.
++
++* Improved url parsing. (Issue #73)
++
++* IPv6 url support. (Issue #72)
++
++
++1.3 (2012-03-25)
++----------------
++
++* Removed pre-1.0 deprecated API.
++
++* Refactored helpers into a ``urllib3.util`` submodule.
++
++* Fixed multipart encoding to support list-of-tuples for keys with multiple
++  values. (Issue #48)
++
++* Fixed multiple Set-Cookie headers in response not getting merged properly in
++  Python 3. (Issue #53)
++
++* AppEngine support with Py27. (Issue #61)
++
++* Minor ``encode_multipart_formdata`` fixes related to Python 3 strings vs
++  bytes.
++
++
++1.2.2 (2012-02-06)
++------------------
++
++* Fixed packaging bug of not shipping ``test-requirements.txt``. (Issue #47)
++
++
++1.2.1 (2012-02-05)
++------------------
++
++* Fixed another bug related to when ``ssl`` module is not available. (Issue #41)
++
++* Location parsing errors now raise ``urllib3.exceptions.LocationParseError``
++  which inherits from ``ValueError``.
++
++
++1.2 (2012-01-29)
++----------------
++
++* Added Python 3 support (tested on 3.2.2)
++
++* Dropped Python 2.5 support (tested on 2.6.7, 2.7.2)
++
++* Use ``select.poll`` instead of ``select.select`` for platforms that support
++  it.
++
++* Use ``Queue.LifoQueue`` instead of ``Queue.Queue`` for more aggressive
++  connection reusing. Configurable by overriding ``ConnectionPool.QueueCls``.
++
++* Fixed ``ImportError`` during install when ``ssl`` module is not available.
++  (Issue #41)
++
++* Fixed ``PoolManager`` redirects between schemes (such as HTTP -> HTTPS) not
++  completing properly. (Issue #28, uncovered by Issue #10 in v1.1)
++
++* Ported ``dummyserver`` to use ``tornado`` instead of ``webob`` +
++  ``eventlet``. Removed extraneous unsupported dummyserver testing backends.
++  Added socket-level tests.
++
++* More tests. Achievement Unlocked: 99% Coverage.
++
++
++1.1 (2012-01-07)
++----------------
++
++* Refactored ``dummyserver`` to its own root namespace module (used for
++  testing).
++
++* Added hostname verification for ``VerifiedHTTPSConnection`` by vendoring in
++  Py32's ``ssl_match_hostname``. (Issue #25)
++
++* Fixed cross-host HTTP redirects when using ``PoolManager``. (Issue #10)
++
++* Fixed ``decode_content`` being ignored when set through ``urlopen``. (Issue
++  #27)
++
++* Fixed timeout-related bugs. (Issues #17, #23)
++
++
++1.0.2 (2011-11-04)
++------------------
++
++* Fixed typo in ``VerifiedHTTPSConnection`` which would only present as a bug if
++  you're using the object manually. (Thanks pyos)
++
++* Made RecentlyUsedContainer (and consequently PoolManager) more thread-safe by
++  wrapping the access log in a mutex. (Thanks @christer)
++
++* Made RecentlyUsedContainer more dict-like (corrected ``__delitem__`` and
++  ``__getitem__`` behaviour), with tests. Shouldn't affect core urllib3 code.
++
++
++1.0.1 (2011-10-10)
++------------------
++
++* Fixed a bug where the same connection would get returned into the pool twice,
++  causing extraneous "HttpConnectionPool is full" log warnings.
++
++
++1.0 (2011-10-08)
++----------------
++
++* Added ``PoolManager`` with LRU expiration of connections (tested and
++  documented).
++* Added ``ProxyManager`` (needs tests, docs, and confirmation that it works
++  with HTTPS proxies).
++* Added optional partial-read support for responses when
++  ``preload_content=False``. You can now make requests and just read the headers
++  without loading the content.
++* Made response decoding optional (default on, same as before).
++* Added optional explicit boundary string for ``encode_multipart_formdata``.
++* Convenience request methods are now inherited from ``RequestMethods``. Old
++  helpers like ``get_url`` and ``post_url`` should be abandoned in favour of
++  the new ``request(method, url, ...)``.
++* Refactored code to be even more decoupled, reusable, and extendable.
++* License header added to ``.py`` files.
++* Embiggened the documentation: Lots of Sphinx-friendly docstrings in the code
++  and docs in ``docs/`` and on https://urllib3.readthedocs.io/.
++* Embettered all the things!
++* Started writing this file.
++
++
++0.4.1 (2011-07-17)
++------------------
++
++* Minor bug fixes, code cleanup.
++
++
++0.4 (2011-03-01)
++----------------
++
++* Better unicode support.
++* Added ``VerifiedHTTPSConnection``.
++* Added ``NTLMConnectionPool`` in contrib.
++* Minor improvements.
++
++
++0.3.1 (2010-07-13)
++------------------
++
++* Added ``assert_host_name`` optional parameter. Now compatible with proxies.
++
++
++0.3 (2009-12-10)
++----------------
++
++* Added HTTPS support.
++* Minor bug fixes.
++* Refactored, broken backwards compatibility with 0.2.
++* API to be treated as stable from this version forward.
++
++
++0.2 (2008-11-17)
++----------------
++
++* Added unit tests.
++* Bug fixes.
++
++
++0.1 (2008-11-16)
++----------------
++
++* First release.
++
++
+diff --git a/third_party/python/urllib3/urllib3-1.26.0.dist-info/RECORD b/third_party/python/urllib3/urllib3-1.26.0.dist-info/RECORD
+new file mode 100644
+--- /dev/null
++++ b/third_party/python/urllib3/urllib3-1.26.0.dist-info/RECORD
+@@ -0,0 +1,44 @@
++urllib3/__init__.py,sha256=j3yzHIbmW7CS-IKQJ9-PPQf_YKO8EOAey_rMW0UR7us,2763
++urllib3/_collections.py,sha256=Rp1mVyBgc_UlAcp6M3at1skJBXR5J43NawRTvW2g_XY,10811
++urllib3/_version.py,sha256=H0vLQ8PY350EPZlZQa8ri0tEjVS-xhGdQOHcU360-0A,63
++urllib3/connection.py,sha256=BdaUSNpGzO0zq28i9MhOXb6QZspeVdVrYtjnkk2Eqg4,18396
++urllib3/connectionpool.py,sha256=IKoeuJZY9YAYm0GK4q-MXAhyXW0M_FnvabYaNsDIR-E,37133
++urllib3/exceptions.py,sha256=lNrKC5J8zeBXIu9SSKSNb7cLi8iXl9ARu9DHD2SflZM,7810
++urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579
++urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440
++urllib3/poolmanager.py,sha256=whzlX6UTEgODMOCy0ZDMUONRBCz5wyIM8Z9opXAY-Lk,19763
++urllib3/request.py,sha256=ZFSIqX0C6WizixecChZ3_okyu7BEv0lZu1VT0s6h4SM,5985
++urllib3/response.py,sha256=hGhGBh7TkEkh_IQg5C1W_xuPNrgIKv5BUXPyE-q0LuE,28203
++urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
++urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957
++urllib3/contrib/appengine.py,sha256=7Pxb0tKfDB_LTGPERiswH0qomhDoUUOo5kwybAKLQyE,11010
++urllib3/contrib/ntlmpool.py,sha256=6I95h1_71fzxmoMSNtY0gB8lnyCoVtP_DpqFGj14fdU,4160
++urllib3/contrib/pyopenssl.py,sha256=vgh6j52w9xgwq-3R2kfB5M2JblQATJfKAK3lIAc1kSg,16778
++urllib3/contrib/securetransport.py,sha256=KxGPZk8d4YepWm7Rc-SBt1XrzIfnLKc8JkUVV75XzgE,34286
++urllib3/contrib/socks.py,sha256=DcRjM2l0rQMIyhYrN6r-tnVkY6ZTDxHJlM8_usAkGCA,7097
++urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
++urllib3/contrib/_securetransport/bindings.py,sha256=E1_7ScsgOchfxneozbAueK7ziCwF35fna4DuDCYJ9_o,17637
++urllib3/contrib/_securetransport/low_level.py,sha256=lgIdsSycqfB0Xm5BiJzXGeIKT7ybCQMFPJAgkcwPa1s,13908
++urllib3/packages/__init__.py,sha256=h4BLhD4tLaBx1adaDtKXfupsgqY0wWLXb_f1_yVlV6A,108
++urllib3/packages/six.py,sha256=adx4z-eM_D0Vvu0IIqVzFACQ_ux9l64y7DkSEfbxCDs,32536
++urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
++urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417
++urllib3/packages/ssl_match_hostname/__init__.py,sha256=zppezdEQdpGsYerI6mV6MfUYy495JV4mcOWC_GgbljU,757
++urllib3/packages/ssl_match_hostname/_implementation.py,sha256=6dZ-q074g7XhsJ27MFCgkct8iVNZB3sMZvKhf-KUVy0,5679
++urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155
++urllib3/util/connection.py,sha256=21B-LX0c8fkxPDssyHCaK0pCnmrKmhltg5EoouHiAPU,4910
++urllib3/util/proxy.py,sha256=FGipAEnvZteyldXNjce4DEB7YzwU-a5lep8y5S0qHQg,1604
++urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498
++urllib3/util/request.py,sha256=NnzaEKQ1Pauw5MFMV6HmgEMHITf0Aua9fQuzi2uZzGc,4123
++urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510
++urllib3/util/retry.py,sha256=tn168HDMUynFmXRP-uVaLRUOlbTEJikoB1RuZdwfCes,21366
++urllib3/util/ssl_.py,sha256=cUsmU604z2zAOZcaXDpINXOokQ1RtlJMe96TBDkaJp0,16199
++urllib3/util/ssltransport.py,sha256=IvGQvs9YWkf4jzfqVjTu_UWjwAUgPn5ActajW8VLz6A,6908
++urllib3/util/timeout.py,sha256=QSbBUNOB9yh6AnDn61SrLQ0hg5oz0I9-uXEG91AJuIg,10003
++urllib3/util/url.py,sha256=LWfLSlI4l2FmUMKfCkElCaW10-0N-sJDT9bxaDZJkjs,13964
++urllib3/util/wait.py,sha256=3MUKRSAUJDB2tgco7qRUskW0zXGAWYvRRE4Q1_6xlLs,5404
++urllib3-1.26.0.dist-info/LICENSE.txt,sha256=w3vxhuJ8-dvpYZ5V7f486nswCRzrPaY8fay-Dm13kHs,1115
++urllib3-1.26.0.dist-info/METADATA,sha256=Wghdt6nLf9HfZHhWj8Dpgz4n9vGRqXYhdIwJRPgki6M,42629
++urllib3-1.26.0.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
++urllib3-1.26.0.dist-info/top_level.txt,sha256=EMiXL2sKrTcmrMxIHTqdc3ET54pQI2Y072LexFEemvo,8
++urllib3-1.26.0.dist-info/RECORD,,
+diff --git a/third_party/python/urllib3/urllib3-1.26.0.dist-info/WHEEL b/third_party/python/urllib3/urllib3-1.26.0.dist-info/WHEEL
+new file mode 100644
+--- /dev/null
++++ b/third_party/python/urllib3/urllib3-1.26.0.dist-info/WHEEL
+@@ -0,0 +1,6 @@
++Wheel-Version: 1.0
++Generator: bdist_wheel (0.35.1)
++Root-Is-Purelib: true
++Tag: py2-none-any
++Tag: py3-none-any
++
+diff --git a/third_party/python/urllib3/urllib3-1.25.9.dist-info/top_level.txt b/third_party/python/urllib3/urllib3-1.26.0.dist-info/top_level.txt
+rename from third_party/python/urllib3/urllib3-1.25.9.dist-info/top_level.txt
+rename to third_party/python/urllib3/urllib3-1.26.0.dist-info/top_level.txt
+diff --git a/third_party/python/urllib3/urllib3/__init__.py b/third_party/python/urllib3/urllib3/__init__.py
+--- a/third_party/python/urllib3/urllib3/__init__.py
++++ b/third_party/python/urllib3/urllib3/__init__.py
+@@ -1,33 +1,32 @@
+ """
+-urllib3 - Thread-safe connection pooling and re-using.
++Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more
+ """
+ from __future__ import absolute_import
++
++# Set default logging handler to avoid "No handler found" warnings.
++import logging
+ import warnings
+-
+-from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
++from logging import NullHandler
+ 
+ from . import exceptions
++from ._version import __version__
++from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
+ from .filepost import encode_multipart_formdata
+ from .poolmanager import PoolManager, ProxyManager, proxy_from_url
+ from .response import HTTPResponse
+ from .util.request import make_headers
+-from .util.url import get_host
++from .util.retry import Retry
+ from .util.timeout import Timeout
+-from .util.retry import Retry
+-
+-
+-# Set default logging handler to avoid "No handler found" warnings.
+-import logging
+-from logging import NullHandler
++from .util.url import get_host
+ 
+ __author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
+ __license__ = "MIT"
+-__version__ = "1.25.9"
++__version__ = __version__
+ 
+ __all__ = (
+     "HTTPConnectionPool",
+     "HTTPSConnectionPool",
+     "PoolManager",
+     "ProxyManager",
+     "HTTPResponse",
+     "Retry",
+diff --git a/third_party/python/urllib3/urllib3/_collections.py b/third_party/python/urllib3/urllib3/_collections.py
+--- a/third_party/python/urllib3/urllib3/_collections.py
++++ b/third_party/python/urllib3/urllib3/_collections.py
+@@ -12,19 +12,20 @@ except ImportError:  # Platform-specific
+         def __enter__(self):
+             pass
+ 
+         def __exit__(self, exc_type, exc_value, traceback):
+             pass
+ 
+ 
+ from collections import OrderedDict
++
+ from .exceptions import InvalidHeader
+-from .packages.six import iterkeys, itervalues, PY3
+-
++from .packages import six
++from .packages.six import iterkeys, itervalues
+ 
+ __all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"]
+ 
+ 
+ _Null = object()
+ 
+ 
+ class RecentlyUsedContainer(MutableMapping):
+@@ -169,33 +170,33 @@ class HTTPHeaderDict(MutableMapping):
+             other = type(self)(other)
+         return dict((k.lower(), v) for k, v in self.itermerged()) == dict(
+             (k.lower(), v) for k, v in other.itermerged()
+         )
+ 
+     def __ne__(self, other):
+         return not self.__eq__(other)
+ 
+-    if not PY3:  # Python 2
++    if six.PY2:  # Python 2
+         iterkeys = MutableMapping.iterkeys
+         itervalues = MutableMapping.itervalues
+ 
+     __marker = object()
+ 
+     def __len__(self):
+         return len(self._container)
+ 
+     def __iter__(self):
+         # Only provide the originally cased names
+         for vals in self._container.values():
+             yield vals[0]
+ 
+     def pop(self, key, default=__marker):
+         """D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
+-          If key is not found, d is returned if given, otherwise KeyError is raised.
++        If key is not found, d is returned if given, otherwise KeyError is raised.
+         """
+         # Using the MutableMapping function directly fails due to the private marker.
+         # Using ordinary dict.pop would expose the internal structures.
+         # So let's reinvent the wheel.
+         try:
+             value = self[key]
+         except KeyError:
+             if default is self.__marker:
+diff --git a/third_party/python/urllib3/urllib3/_version.py b/third_party/python/urllib3/urllib3/_version.py
+new file mode 100644
+--- /dev/null
++++ b/third_party/python/urllib3/urllib3/_version.py
+@@ -0,0 +1,2 @@
++# This file is protected via CODEOWNERS
++__version__ = "1.26.0"
+diff --git a/third_party/python/urllib3/urllib3/connection.py b/third_party/python/urllib3/urllib3/connection.py
+--- a/third_party/python/urllib3/urllib3/connection.py
++++ b/third_party/python/urllib3/urllib3/connection.py
+@@ -1,19 +1,23 @@
+ from __future__ import absolute_import
+-import re
++
+ import datetime
+ import logging
+ import os
++import re
+ import socket
+-from socket import error as SocketError, timeout as SocketTimeout
+ import warnings
++from socket import error as SocketError
++from socket import timeout as SocketTimeout
++
+ from .packages import six
+ from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection
+ from .packages.six.moves.http_client import HTTPException  # noqa: F401
++from .util.proxy import create_proxy_ssl_context
+ 
+ try:  # Compiled with SSL?
+     import ssl
+ 
+     BaseSSLError = ssl.SSLError
+ except (ImportError, AttributeError):  # Platform-specific: No SSL.
+     ssl = None
+ 
+@@ -25,76 +29,77 @@ try:
+     # Python 3: not a no-op, we're adding this to the namespace so it can be imported.
+     ConnectionError = ConnectionError
+ except NameError:
+     # Python 2
+     class ConnectionError(Exception):
+         pass
+ 
+ 
++try:  # Python 3:
++    # Not a no-op, we're adding this to the namespace so it can be imported.
++    BrokenPipeError = BrokenPipeError
++except NameError:  # Python 2:
++
++    class BrokenPipeError(Exception):
++        pass
++
++
++from ._version import __version__
+ from .exceptions import (
++    ConnectTimeoutError,
+     NewConnectionError,
+-    ConnectTimeoutError,
+     SubjectAltNameWarning,
+     SystemTimeWarning,
+ )
+-from .packages.ssl_match_hostname import match_hostname, CertificateError
+-
++from .packages.ssl_match_hostname import CertificateError, match_hostname
++from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection
+ from .util.ssl_ import (
+-    resolve_cert_reqs,
+-    resolve_ssl_version,
+     assert_fingerprint,
+     create_urllib3_context,
++    resolve_cert_reqs,
++    resolve_ssl_version,
+     ssl_wrap_socket,
+ )
+ 
+-
+-from .util import connection
+-
+-from ._collections import HTTPHeaderDict
+-
+ log = logging.getLogger(__name__)
+ 
+ port_by_scheme = {"http": 80, "https": 443}
+ 
+ # When it comes time to update this value as a part of regular maintenance
+ # (ie test_recent_date is failing) update it to ~6 months before the current date.
+ RECENT_DATE = datetime.date(2019, 1, 1)
+ 
+ _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
+ 
+ 
+-class DummyConnection(object):
+-    """Used to detect a failed ConnectionCls import."""
+-
+-    pass
+-
+-
+ class HTTPConnection(_HTTPConnection, object):
+     """
+-    Based on httplib.HTTPConnection but provides an extra constructor
++    Based on :class:`http.client.HTTPConnection` but provides an extra constructor
+     backwards-compatibility layer between older and newer Pythons.
+ 
+     Additional keyword parameters are used to configure attributes of the connection.
+     Accepted parameters include:
+ 
+-      - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
+-      - ``source_address``: Set the source address for the current connection.
+-      - ``socket_options``: Set specific options on the underlying socket. If not specified, then
+-        defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
+-        Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
++    - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
++    - ``source_address``: Set the source address for the current connection.
++    - ``socket_options``: Set specific options on the underlying socket. If not specified, then
++      defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
++      Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
+ 
+-        For example, if you wish to enable TCP Keep Alive in addition to the defaults,
+-        you might pass::
++      For example, if you wish to enable TCP Keep Alive in addition to the defaults,
++      you might pass:
++
++      .. code-block:: python
+ 
+-            HTTPConnection.default_socket_options + [
+-                (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
+-            ]
++         HTTPConnection.default_socket_options + [
++             (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
++         ]
+ 
+-        Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
++      Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
+     """
+ 
+     default_port = port_by_scheme["http"]
+ 
+     #: Disable Nagle's algorithm by default.
+     #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
+     default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
+ 
+@@ -107,16 +112,20 @@ class HTTPConnection(_HTTPConnection, ob
+ 
+         # Pre-set source_address.
+         self.source_address = kw.get("source_address")
+ 
+         #: The socket options provided by the user. If no options are
+         #: provided, we use the default options.
+         self.socket_options = kw.pop("socket_options", self.default_socket_options)
+ 
++        # Proxy options provided by the user.
++        self.proxy = kw.pop("proxy", None)
++        self.proxy_config = kw.pop("proxy_config", None)
++
+         _HTTPConnection.__init__(self, *args, **kw)
+ 
+     @property
+     def host(self):
+         """
+         Getter method to remove any trailing dots that indicate the hostname is an FQDN.
+ 
+         In general, SSL certificates don't include the trailing dot indicating a
+@@ -139,17 +148,17 @@ class HTTPConnection(_HTTPConnection, ob
+         Setter for the `host` property.
+ 
+         We assume that only urllib3 uses the _dns_host attribute; httplib itself
+         only uses `host`, and it seems reasonable that other libraries follow suit.
+         """
+         self._dns_host = value
+ 
+     def _new_conn(self):
+-        """ Establish a socket connection and set nodelay settings on it.
++        """Establish a socket connection and set nodelay settings on it.
+ 
+         :return: New socket connection.
+         """
+         extra_kw = {}
+         if self.source_address:
+             extra_kw["source_address"] = self.source_address
+ 
+         if self.socket_options:
+@@ -169,85 +178,120 @@ class HTTPConnection(_HTTPConnection, ob
+ 
+         except SocketError as e:
+             raise NewConnectionError(
+                 self, "Failed to establish a new connection: %s" % e
+             )
+ 
+         return conn
+ 
++    def _is_using_tunnel(self):
++        # Google App Engine's httplib does not define _tunnel_host
++        return getattr(self, "_tunnel_host", None)
++
+     def _prepare_conn(self, conn):
+         self.sock = conn
+-        # Google App Engine's httplib does not define _tunnel_host
+-        if getattr(self, "_tunnel_host", None):
++        if self._is_using_tunnel():
+             # TODO: Fix tunnel so it doesn't depend on self.sock state.
+             self._tunnel()
+             # Mark this connection as not reusable
+             self.auto_open = 0
+ 
+     def connect(self):
+         conn = self._new_conn()
+         self._prepare_conn(conn)
+ 
+     def putrequest(self, method, url, *args, **kwargs):
+-        """Send a request to the server"""
++        """"""
++        # Empty docstring because the indentation of CPython's implementation
++        # is broken but we don't want this method in our documentation.
+         match = _CONTAINS_CONTROL_CHAR_RE.search(method)
+         if match:
+             raise ValueError(
+                 "Method cannot contain non-token characters %r (found at least %r)"
+                 % (method, match.group())
+             )
+ 
+         return _HTTPConnection.putrequest(self, method, url, *args, **kwargs)
+ 
++    def putheader(self, header, *values):
++        """"""
++        if SKIP_HEADER not in values:
++            _HTTPConnection.putheader(self, header, *values)
++        elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS:
++            raise ValueError(
++                "urllib3.util.SKIP_HEADER only supports '%s'"
++                % ("', '".join(map(str.title, sorted(SKIPPABLE_HEADERS))),)
++            )
++
++    def request(self, method, url, body=None, headers=None):
++        if headers is None:
++            headers = {}
++        else:
++            # Avoid modifying the headers passed into .request()
++            headers = headers.copy()
++        if "user-agent" not in (k.lower() for k in headers):
++            headers["User-Agent"] = _get_default_user_agent()
++        super(HTTPConnection, self).request(method, url, body=body, headers=headers)
++
+     def request_chunked(self, method, url, body=None, headers=None):
+         """
+         Alternative to the common request method, which sends the
+         body with chunked encoding and not as one block
+         """
+-        headers = HTTPHeaderDict(headers if headers is not None else {})
+-        skip_accept_encoding = "accept-encoding" in headers
+-        skip_host = "host" in headers
++        headers = headers or {}
++        header_keys = set([six.ensure_str(k.lower()) for k in headers])
++        skip_accept_encoding = "accept-encoding" in header_keys
++        skip_host = "host" in header_keys
+         self.putrequest(
+             method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
+         )
++        if "user-agent" not in header_keys:
++            self.putheader("User-Agent", _get_default_user_agent())
+         for header, value in headers.items():
+             self.putheader(header, value)
+         if "transfer-encoding" not in headers:
+             self.putheader("Transfer-Encoding", "chunked")
+         self.endheaders()
+ 
+         if body is not None:
+             stringish_types = six.string_types + (bytes,)
+             if isinstance(body, stringish_types):
+                 body = (body,)
+             for chunk in body:
+                 if not chunk:
+                     continue
+                 if not isinstance(chunk, bytes):
+                     chunk = chunk.encode("utf8")
+                 len_str = hex(len(chunk))[2:]
+-                self.send(len_str.encode("utf-8"))
+-                self.send(b"\r\n")
+-                self.send(chunk)
+-                self.send(b"\r\n")
++                to_send = bytearray(len_str.encode())
++                to_send += b"\r\n"
++                to_send += chunk
++                to_send += b"\r\n"
++                self.send(to_send)
+ 
+         # After the if clause, to always have a closed body
+         self.send(b"0\r\n\r\n")
+ 
+ 
+ class HTTPSConnection(HTTPConnection):
++    """
++    Many of the parameters to this constructor are passed to the underlying SSL
++    socket by means of :py:func:`urllib3.util.ssl_wrap_socket`.
++    """
++
+     default_port = port_by_scheme["https"]
+ 
+     cert_reqs = None
+     ca_certs = None
+     ca_cert_dir = None
+     ca_cert_data = None
+     ssl_version = None
+     assert_fingerprint = None
++    tls_in_tls_required = False
+ 
+     def __init__(
+         self,
+         host,
+         port=None,
+         key_file=None,
+         cert_file=None,
+         key_password=None,
+@@ -302,20 +346,25 @@ class HTTPSConnection(HTTPConnection):
+         self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
+         self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
+         self.ca_cert_data = ca_cert_data
+ 
+     def connect(self):
+         # Add certificate verification
+         conn = self._new_conn()
+         hostname = self.host
++        tls_in_tls = False
+ 
+-        # Google App Engine's httplib does not define _tunnel_host
+-        if getattr(self, "_tunnel_host", None):
++        if self._is_using_tunnel():
++            if self.tls_in_tls_required:
++                conn = self._connect_tls_proxy(hostname, conn)
++                tls_in_tls = True
++
+             self.sock = conn
++
+             # Calls self._set_hostport(), so self.host is
+             # self._tunnel_host below.
+             self._tunnel()
+             # Mark this connection as not reusable
+             self.auto_open = 0
+ 
+             # Override the host with the one we're requesting data from.
+             hostname = self._tunnel_host
+@@ -363,18 +412,36 @@ class HTTPSConnection(HTTPConnection):
+             keyfile=self.key_file,
+             certfile=self.cert_file,
+             key_password=self.key_password,
+             ca_certs=self.ca_certs,
+             ca_cert_dir=self.ca_cert_dir,
+             ca_cert_data=self.ca_cert_data,
+             server_hostname=server_hostname,
+             ssl_context=context,
++            tls_in_tls=tls_in_tls,
+         )
+ 
++        # If we're using all defaults and the connection
++        # is TLSv1 or TLSv1.1 we throw a DeprecationWarning
++        # for the host.
++        if (
++            default_ssl_context
++            and self.ssl_version is None
++            and hasattr(self.sock, "version")
++            and self.sock.version() in {"TLSv1", "TLSv1.1"}
++        ):
++            warnings.warn(
++                "Negotiating TLSv1/TLSv1.1 by default is deprecated "
++                "and will be disabled in urllib3 v2.0.0. Connecting to "
++                "'%s' with '%s' can be enabled by explicitly opting-in "
++                "with 'ssl_version'" % (self.host, self.sock.version()),
++                DeprecationWarning,
++            )
++
+         if self.assert_fingerprint:
+             assert_fingerprint(
+                 self.sock.getpeercert(binary_form=True), self.assert_fingerprint
+             )
+         elif (
+             context.verify_mode != ssl.CERT_NONE
+             and not getattr(context, "check_hostname", False)
+             and self.assert_hostname is not False
+@@ -395,29 +462,73 @@ class HTTPSConnection(HTTPConnection):
+                 )
+             _match_hostname(cert, self.assert_hostname or server_hostname)
+ 
+         self.is_verified = (
+             context.verify_mode == ssl.CERT_REQUIRED
+             or self.assert_fingerprint is not None
+         )
+ 
++    def _connect_tls_proxy(self, hostname, conn):
++        """
++        Establish a TLS connection to the proxy using the provided SSL context.
++        """
++        proxy_config = self.proxy_config
++        ssl_context = proxy_config.ssl_context
++        if ssl_context:
++            # If the user provided a proxy context, we assume CA and client
++            # certificates have already been set
++            return ssl_wrap_socket(
++                sock=conn,
++                server_hostname=hostname,
++                ssl_context=ssl_context,
++            )
++
++        ssl_context = create_proxy_ssl_context(
++            self.ssl_version,
++            self.cert_reqs,
++            self.ca_certs,
++            self.ca_cert_dir,
++            self.ca_cert_data,
++        )
++
++        # If no cert was provided, use only the default options for server
++        # certificate validation
++        return ssl_wrap_socket(
++            sock=conn,
++            ca_certs=self.ca_certs,
++            ca_cert_dir=self.ca_cert_dir,
++            ca_cert_data=self.ca_cert_data,
++            server_hostname=hostname,
++            ssl_context=ssl_context,
++        )
++
+ 
+ def _match_hostname(cert, asserted_hostname):
+     try:
+         match_hostname(cert, asserted_hostname)
+     except CertificateError as e:
+         log.warning(
+             "Certificate did not match expected hostname: %s. Certificate: %s",
+             asserted_hostname,
+             cert,
+         )
+         # Add cert to exception and reraise so client code can inspect
+         # the cert when catching the exception, if they want to
+         e._peer_cert = cert
+         raise
+ 
+ 
++def _get_default_user_agent():
++    return "python-urllib3/%s" % __version__
++
++
++class DummyConnection(object):
++    """Used to detect a failed ConnectionCls import."""
++
++    pass
++
++
+ if not ssl:
+     HTTPSConnection = DummyConnection  # noqa: F811
+ 
+ 
+ VerifiedHTTPSConnection = HTTPSConnection
+diff --git a/third_party/python/urllib3/urllib3/connectionpool.py b/third_party/python/urllib3/urllib3/connectionpool.py
+--- a/third_party/python/urllib3/urllib3/connectionpool.py
++++ b/third_party/python/urllib3/urllib3/connectionpool.py
+@@ -1,62 +1,58 @@
+ from __future__ import absolute_import
++
+ import errno
+ import logging
++import socket
+ import sys
+ import warnings
++from socket import error as SocketError
++from socket import timeout as SocketTimeout
+ 
+-from socket import error as SocketError, timeout as SocketTimeout
+-import socket
+-
+-
++from .connection import (
++    BaseSSLError,
++    BrokenPipeError,
++    DummyConnection,
++    HTTPConnection,
++    HTTPException,
++    HTTPSConnection,
++    VerifiedHTTPSConnection,
++    port_by_scheme,
++)
+ from .exceptions import (
+     ClosedPoolError,
+-    ProtocolError,
+     EmptyPoolError,
+     HeaderParsingError,
+     HostChangedError,
++    InsecureRequestWarning,
+     LocationValueError,
+     MaxRetryError,
++    NewConnectionError,
++    ProtocolError,
+     ProxyError,
+     ReadTimeoutError,
+     SSLError,
+     TimeoutError,
+-    InsecureRequestWarning,
+-    NewConnectionError,
+ )
+-from .packages.ssl_match_hostname import CertificateError
+ from .packages import six
+ from .packages.six.moves import queue
+-from .connection import (
+-    port_by_scheme,
+-    DummyConnection,
+-    HTTPConnection,
+-    HTTPSConnection,
+-    VerifiedHTTPSConnection,
+-    HTTPException,
+-    BaseSSLError,
+-)
++from .packages.ssl_match_hostname import CertificateError
+ from .request import RequestMethods
+ from .response import HTTPResponse
+-
+ from .util.connection import is_connection_dropped
++from .util.proxy import connection_requires_http_tunnel
++from .util.queue import LifoQueue
+ from .util.request import set_file_position
+ from .util.response import assert_header_parsing
+ from .util.retry import Retry
+ from .util.timeout import Timeout
+-from .util.url import (
+-    get_host,
+-    parse_url,
+-    Url,
+-    _normalize_host as normalize_host,
+-    _encode_target,
+-)
+-from .util.queue import LifoQueue
+-
++from .util.url import Url, _encode_target
++from .util.url import _normalize_host as normalize_host
++from .util.url import get_host, parse_url
+ 
+ xrange = six.moves.xrange
+ 
+ log = logging.getLogger(__name__)
+ 
+ _Default = object()
+ 
+ 
+@@ -106,26 +102,26 @@ class ConnectionPool(object):
+ 
+ 
+ class HTTPConnectionPool(ConnectionPool, RequestMethods):
+     """
+     Thread-safe connection pool for one host.
+ 
+     :param host:
+         Host used for this HTTP Connection (e.g. "localhost"), passed into
+-        :class:`httplib.HTTPConnection`.
++        :class:`http.client.HTTPConnection`.
+ 
+     :param port:
+         Port used for this HTTP Connection (None is equivalent to 80), passed
+-        into :class:`httplib.HTTPConnection`.
++        into :class:`http.client.HTTPConnection`.
+ 
+     :param strict:
+         Causes BadStatusLine to be raised if the status line can't be parsed
+         as a valid HTTP/1.0 or 1.1 status line, passed into
+-        :class:`httplib.HTTPConnection`.
++        :class:`http.client.HTTPConnection`.
+ 
+         .. note::
+            Only works in Python 2. This parameter is ignored in Python 3.
+ 
+     :param timeout:
+         Socket timeout in seconds for each individual connection. This can
+         be a float or integer, which sets the timeout for the HTTP request,
+         or an instance of :class:`urllib3.util.Timeout` which gives you more
+@@ -149,21 +145,21 @@ class HTTPConnectionPool(ConnectionPool,
+         Headers to include with all requests, unless other headers are given
+         explicitly.
+ 
+     :param retries:
+         Retry configuration to use by default with requests in this pool.
+ 
+     :param _proxy:
+         Parsed proxy URL, should not be used directly, instead, see
+-        :class:`urllib3.connectionpool.ProxyManager`"
++        :class:`urllib3.ProxyManager`
+ 
+     :param _proxy_headers:
+         A dictionary with proxy headers, should not be used directly,
+-        instead, see :class:`urllib3.connectionpool.ProxyManager`"
++        instead, see :class:`urllib3.ProxyManager`
+ 
+     :param \\**conn_kw:
+         Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
+         :class:`urllib3.connection.HTTPSConnection` instances.
+     """
+ 
+     scheme = "http"
+     ConnectionCls = HTTPConnection
+@@ -176,16 +172,17 @@ class HTTPConnectionPool(ConnectionPool,
+         strict=False,
+         timeout=Timeout.DEFAULT_TIMEOUT,
+         maxsize=1,
+         block=False,
+         headers=None,
+         retries=None,
+         _proxy=None,
+         _proxy_headers=None,
++        _proxy_config=None,
+         **conn_kw
+     ):
+         ConnectionPool.__init__(self, host, port)
+         RequestMethods.__init__(self, headers)
+ 
+         self.strict = strict
+ 
+         if not isinstance(timeout, Timeout):
+@@ -197,32 +194,36 @@ class HTTPConnectionPool(ConnectionPool,
+         self.timeout = timeout
+         self.retries = retries
+ 
+         self.pool = self.QueueCls(maxsize)
+         self.block = block
+ 
+         self.proxy = _proxy
+         self.proxy_headers = _proxy_headers or {}
++        self.proxy_config = _proxy_config
+ 
+         # Fill the queue up so that doing get() on it will block properly
+         for _ in xrange(maxsize):
+             self.pool.put(None)
+ 
+         # These are mostly for testing and debugging purposes.
+         self.num_connections = 0
+         self.num_requests = 0
+         self.conn_kw = conn_kw
+ 
+         if self.proxy:
+             # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
+             # We cannot know if the user has added default socket options, so we cannot replace the
+             # list.
+             self.conn_kw.setdefault("socket_options", [])
+ 
++            self.conn_kw["proxy"] = self.proxy
++            self.conn_kw["proxy_config"] = self.proxy_config
++
+     def _new_conn(self):
+         """
+         Return a fresh :class:`HTTPConnection`.
+         """
+         self.num_connections += 1
+         log.debug(
+             "Starting new HTTP connection (%d): %s:%s",
+             self.num_connections,
+@@ -267,17 +268,17 @@ class HTTPConnectionPool(ConnectionPool,
+             pass  # Oh well, we'll create a new connection then
+ 
+         # If this is a persistent connection, check if it got disconnected
+         if conn and is_connection_dropped(conn):
+             log.debug("Resetting dropped connection: %s", self.host)
+             conn.close()
+             if getattr(conn, "auto_open", 1) == 0:
+                 # This is a proxied connection that has been mutated by
+-                # httplib._tunnel() and cannot be reused (since it would
++                # http.client._tunnel() and cannot be reused (since it would
+                 # attempt to bypass the proxy)
+                 conn = None
+ 
+         return conn or self._new_conn()
+ 
+     def _put_conn(self, conn):
+         """
+         Put a connection back into the pool.
+@@ -379,22 +380,40 @@ class HTTPConnectionPool(ConnectionPool,
+         # Trigger any extra validation we need to do.
+         try:
+             self._validate_conn(conn)
+         except (SocketTimeout, BaseSSLError) as e:
+             # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
+             self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
+             raise
+ 
+-        # conn.request() calls httplib.*.request, not the method in
++        # conn.request() calls http.client.*.request, not the method in
+         # urllib3.request. It also calls makefile (recv) on the socket.
+-        if chunked:
+-            conn.request_chunked(method, url, **httplib_request_kw)
+-        else:
+-            conn.request(method, url, **httplib_request_kw)
++        try:
++            if chunked:
++                conn.request_chunked(method, url, **httplib_request_kw)
++            else:
++                conn.request(method, url, **httplib_request_kw)
++
++        # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
++        # legitimately able to close the connection after sending a valid response.
++        # With this behaviour, the received response is still readable.
++        except BrokenPipeError:
++            # Python 3
++            pass
++        except IOError as e:
++            # Python 2 and macOS/Linux
++            # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS
++            # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
++            if e.errno not in {
++                errno.EPIPE,
++                errno.ESHUTDOWN,
++                errno.EPROTOTYPE,
++            }:
++                raise
+ 
+         # Reset the timeout for the recv() on the socket
+         read_timeout = timeout_obj.read_timeout
+ 
+         # App Engine doesn't have a sock attr
+         if getattr(conn, "sock", None):
+             # In Python 3 socket.py will catch EAGAIN and return None when you
+             # try and read into the file pointer created by http.client, which
+@@ -527,20 +546,22 @@ class HTTPConnectionPool(ConnectionPool,
+            `release_conn` will only behave as expected if
+            `preload_content=False` because we want to make
+            `preload_content=False` the default behaviour someday soon without
+            breaking backwards compatibility.
+ 
+         :param method:
+             HTTP request method (such as GET, POST, PUT, etc.)
+ 
++        :param url:
++            The URL to perform the request on.
++
+         :param body:
+-            Data to send in the request body (useful for creating
+-            POST requests, see HTTPConnectionPool.post_url for
+-            more convenience).
++            Data to send in the request body, either :class:`str`, :class:`bytes`,
++            an iterable of :class:`str`/:class:`bytes`, or a file-like object.
+ 
+         :param headers:
+             Dictionary of custom headers to send, such as User-Agent,
+             If-None-Match, etc. If None, pool headers are used. If provided,
+             these headers completely replace any pool-specific headers.
+ 
+         :param retries:
+             Configure the number of retries to allow before raising a
+@@ -560,17 +581,17 @@ class HTTPConnectionPool(ConnectionPool,
+ 
+         :param redirect:
+             If True, automatically handle redirects (status codes 301, 302,
+             303, 307, 308). Each redirect counts as a retry. Disabling retries
+             will disable redirect, too.
+ 
+         :param assert_same_host:
+             If ``True``, will make sure that the host of the pool requests is
+-            consistent else will raise HostChangedError. When False, you can
++            consistent else will raise HostChangedError. When ``False``, you can
+             use the pool on an HTTP proxy and request foreign hosts.
+ 
+         :param timeout:
+             If specified, overrides the default timeout for this one
+             request. It may be a float (in seconds) or an instance of
+             :class:`urllib3.util.Timeout`.
+ 
+         :param pool_timeout:
+@@ -597,16 +618,20 @@ class HTTPConnectionPool(ConnectionPool,
+             Position to seek to in file-like body in the event of a retry or
+             redirect. Typically this won't need to be set because urllib3 will
+             auto-populate the value when needed.
+ 
+         :param \\**response_kw:
+             Additional parameters are passed to
+             :meth:`urllib3.response.HTTPResponse.from_httplib`
+         """
++
++        parsed_url = parse_url(url)
++        destination_scheme = parsed_url.scheme
++
+         if headers is None:
+             headers = self.headers
+ 
+         if not isinstance(retries, Retry):
+             retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
+ 
+         if release_conn is None:
+             release_conn = response_kw.get("preload_content", True)
+@@ -614,35 +639,39 @@ class HTTPConnectionPool(ConnectionPool,
+         # Check host
+         if assert_same_host and not self.is_same_host(url):
+             raise HostChangedError(self, url, retries)
+ 
+         # Ensure that the URL we're connecting to is properly encoded
+         if url.startswith("/"):
+             url = six.ensure_str(_encode_target(url))
+         else:
+-            url = six.ensure_str(parse_url(url).url)
++            url = six.ensure_str(parsed_url.url)
+ 
+         conn = None
+ 
+         # Track whether `conn` needs to be released before
+         # returning/raising/recursing. Update this variable if necessary, and
+         # leave `release_conn` constant throughout the function. That way, if
+         # the function recurses, the original value of `release_conn` will be
+         # passed down into the recursive call, and its value will be respected.
+         #
+         # See issue #651 [1] for details.
+         #
+         # [1] <https://github.com/urllib3/urllib3/issues/651>
+         release_this_conn = release_conn
+ 
+-        # Merge the proxy headers. Only do this in HTTP. We have to copy the
+-        # headers dict so we can safely change it without those changes being
+-        # reflected in anyone else's copy.
+-        if self.scheme == "http":
++        http_tunnel_required = connection_requires_http_tunnel(
++            self.proxy, self.proxy_config, destination_scheme
++        )
++
++        # Merge the proxy headers. Only done when not using HTTP CONNECT. We
++        # have to copy the headers dict so we can safely change it without those
++        # changes being reflected in anyone else's copy.
++        if not http_tunnel_required:
+             headers = headers.copy()
+             headers.update(self.proxy_headers)
+ 
+         # Must keep the exception bound to a separate variable or else Python 3
+         # complains about UnboundLocalError.
+         err = None
+ 
+         # Keep track of whether we cleanly exited the except block. This
+@@ -658,17 +687,17 @@ class HTTPConnectionPool(ConnectionPool,
+             timeout_obj = self._get_timeout(timeout)
+             conn = self._get_conn(timeout=pool_timeout)
+ 
+             conn.timeout = timeout_obj.connect_timeout
+ 
+             is_new_proxy_conn = self.proxy is not None and not getattr(
+                 conn, "sock", None
+             )
+-            if is_new_proxy_conn:
++            if is_new_proxy_conn and http_tunnel_required:
+                 self._prepare_proxy(conn)
+ 
+             # Make the request on the httplib connection object.
+             httplib_response = self._make_request(
+                 conn,
+                 method,
+                 url,
+                 timeout=timeout_obj,
+@@ -693,19 +722,21 @@ class HTTPConnectionPool(ConnectionPool,
+                 connection=response_conn,
+                 retries=retries,
+                 **response_kw
+             )
+ 
+             # Everything went great!
+             clean_exit = True
+ 
+-        except queue.Empty:
+-            # Timed out by queue.
+-            raise EmptyPoolError(self, "No pool connections are available.")
++        except EmptyPoolError:
++            # Didn't get a connection from the pool, no need to clean up
++            clean_exit = True
++            release_this_conn = False
++            raise
+ 
+         except (
+             TimeoutError,
+             HTTPException,
+             SocketError,
+             ProtocolError,
+             BaseSSLError,
+             SSLError,
+@@ -830,21 +861,17 @@ class HTTPConnectionPool(ConnectionPool,
+ 
+         return response
+ 
+ 
+ class HTTPSConnectionPool(HTTPConnectionPool):
+     """
+     Same as :class:`.HTTPConnectionPool`, but HTTPS.
+ 
+-    When Python is compiled with the :mod:`ssl` module, then
+-    :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
+-    instead of :class:`.HTTPSConnection`.
+-
+-    :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
++    :class:`.HTTPSConnection` uses one of ``assert_fingerprint``,
+     ``assert_hostname`` and ``host`` in this order to verify connections.
+     If ``assert_hostname`` is False, no verification is done.
+ 
+     The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
+     ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl`
+     is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
+     the connection socket into an SSL socket.
+     """
+@@ -918,25 +945,32 @@ class HTTPSConnectionPool(HTTPConnection
+                 assert_hostname=self.assert_hostname,
+                 assert_fingerprint=self.assert_fingerprint,
+             )
+             conn.ssl_version = self.ssl_version
+         return conn
+ 
+     def _prepare_proxy(self, conn):
+         """
+-        Establish tunnel connection early, because otherwise httplib
+-        would improperly set Host: header to proxy's IP:port.
++        Establishes a tunnel connection through HTTP CONNECT.
++
++        Tunnel connection is established early because otherwise httplib would
++        improperly set Host: header to proxy's IP:port.
+         """
++
+         conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers)
++
++        if self.proxy.scheme == "https":
++            conn.tls_in_tls_required = True
++
+         conn.connect()
+ 
+     def _new_conn(self):
+         """
+-        Return a fresh :class:`httplib.HTTPSConnection`.
++        Return a fresh :class:`http.client.HTTPSConnection`.
+         """
+         self.num_connections += 1
+         log.debug(
+             "Starting new HTTPS connection (%d): %s:%s",
+             self.num_connections,
+             self.host,
+             self.port or "443",
+         )
+diff --git a/third_party/python/urllib3/urllib3/contrib/_securetransport/bindings.py b/third_party/python/urllib3/urllib3/contrib/_securetransport/bindings.py
+--- a/third_party/python/urllib3/urllib3/contrib/_securetransport/bindings.py
++++ b/third_party/python/urllib3/urllib3/contrib/_securetransport/bindings.py
+@@ -27,51 +27,70 @@ license and by oscrypto's:
+     AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+     LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+     FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+     DEALINGS IN THE SOFTWARE.
+ """
+ from __future__ import absolute_import
+ 
+ import platform
+-from ctypes.util import find_library
+ from ctypes import (
+-    c_void_p,
++    CDLL,
++    CFUNCTYPE,
++    POINTER,
++    c_bool,
++    c_byte,
++    c_char_p,
+     c_int32,
+-    c_char_p,
++    c_long,
+     c_size_t,
+-    c_byte,
+     c_uint32,
+     c_ulong,
+-    c_long,
+-    c_bool,
++    c_void_p,
+ )
+-from ctypes import CDLL, POINTER, CFUNCTYPE
+-
++from ctypes.util import find_library
+ 
+-security_path = find_library("Security")
+-if not security_path:
+-    raise ImportError("The library Security could not be found")
+-
++from urllib3.packages.six import raise_from
+ 
+-core_foundation_path = find_library("CoreFoundation")
+-if not core_foundation_path:
+-    raise ImportError("The library CoreFoundation could not be found")
+-
++if platform.system() != "Darwin":
++    raise ImportError("Only macOS is supported")
+ 
+ version = platform.mac_ver()[0]
+ version_info = tuple(map(int, version.split(".")))
+ if version_info < (10, 8):
+     raise OSError(
+         "Only OS X 10.8 and newer are supported, not %s.%s"
+         % (version_info[0], version_info[1])
+     )
+ 
+-Security = CDLL(security_path, use_errno=True)
+-CoreFoundation = CDLL(core_foundation_path, use_errno=True)
++
++def load_cdll(name, macos10_16_path):
++    """Loads a CDLL by name, falling back to known path on 10.16+"""
++    try:
++        # Big Sur is technically 11 but we use 10.16 due to the Big Sur
++        # beta being labeled as 10.16.
++        if version_info >= (10, 16):
++            path = macos10_16_path
++        else:
++            path = find_library(name)
++        if not path:
++            raise OSError  # Caught and reraised as 'ImportError'
++        return CDLL(path, use_errno=True)
++    except OSError:
++        raise_from(ImportError("The library %s failed to load" % name), None)
++
++
++Security = load_cdll(
++    "Security", "/System/Library/Frameworks/Security.framework/Security"
++)
++CoreFoundation = load_cdll(
++    "CoreFoundation",
++    "/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation",
++)
++
+ 
+ Boolean = c_bool
+ CFIndex = c_long
+ CFStringEncoding = c_uint32
+ CFData = c_void_p
+ CFString = c_void_p
+ CFArray = c_void_p
+ CFMutableArray = c_void_p
+@@ -271,16 +290,23 @@ try:
+     Security.SSLSetSessionOption.restype = OSStatus
+ 
+     Security.SSLSetProtocolVersionMin.argtypes = [SSLContextRef, SSLProtocol]
+     Security.SSLSetProtocolVersionMin.restype = OSStatus
+ 
+     Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol]
+     Security.SSLSetProtocolVersionMax.restype = OSStatus
+ 
++    try:
++        Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef]
++        Security.SSLSetALPNProtocols.restype = OSStatus
++    except AttributeError:
++        # Supported only in 10.12+
++        pass
++
+     Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
+     Security.SecCopyErrorMessageString.restype = CFStringRef
+ 
+     Security.SSLReadFunc = SSLReadFunc
+     Security.SSLWriteFunc = SSLWriteFunc
+     Security.SSLContextRef = SSLContextRef
+     Security.SSLProtocol = SSLProtocol
+     Security.SSLCipherSuite = SSLCipherSuite
+diff --git a/third_party/python/urllib3/urllib3/contrib/_securetransport/low_level.py b/third_party/python/urllib3/urllib3/contrib/_securetransport/low_level.py
+--- a/third_party/python/urllib3/urllib3/contrib/_securetransport/low_level.py
++++ b/third_party/python/urllib3/urllib3/contrib/_securetransport/low_level.py
+@@ -5,23 +5,23 @@ These are Python functions that are not 
+ but are necessary to get them to work. They include a whole bunch of low-level
+ CoreFoundation messing about and memory management. The concerns in this module
+ are almost entirely about trying to avoid memory leaks and providing
+ appropriate and useful assistance to the higher-level code.
+ """
+ import base64
+ import ctypes
+ import itertools
++import os
+ import re
+-import os
+ import ssl
++import struct
+ import tempfile
+ 
+-from .bindings import Security, CoreFoundation, CFConst
+-
++from .bindings import CFConst, CoreFoundation, Security
+ 
+ # This regular expression is used to grab PEM data out of a PEM bundle.
+ _PEM_CERTS_RE = re.compile(
+     b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL
+ )
+ 
+ 
+ def _cf_data_from_bytes(bytestring):
+@@ -51,16 +51,61 @@ def _cf_dictionary_from_tuples(tuples):
+         cf_keys,
+         cf_values,
+         dictionary_size,
+         CoreFoundation.kCFTypeDictionaryKeyCallBacks,
+         CoreFoundation.kCFTypeDictionaryValueCallBacks,
+     )
+ 
+ 
++def _cfstr(py_bstr):
++    """
++    Given a Python binary data, create a CFString.
++    The string must be CFReleased by the caller.
++    """
++    c_str = ctypes.c_char_p(py_bstr)
++    cf_str = CoreFoundation.CFStringCreateWithCString(
++        CoreFoundation.kCFAllocatorDefault,
++        c_str,
++        CFConst.kCFStringEncodingUTF8,
++    )
++    return cf_str
++
++
++def _create_cfstring_array(lst):
++    """
++    Given a list of Python binary data, create an associated CFMutableArray.
++    The array must be CFReleased by the caller.
++
++    Raises an ssl.SSLError on failure.
++    """
++    cf_arr = None
++    try:
++        cf_arr = CoreFoundation.CFArrayCreateMutable(
++            CoreFoundation.kCFAllocatorDefault,
++            0,
++            ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
++        )
++        if not cf_arr:
++            raise MemoryError("Unable to allocate memory!")
++        for item in lst:
++            cf_str = _cfstr(item)
++            if not cf_str:
++                raise MemoryError("Unable to allocate memory!")
++            try:
++                CoreFoundation.CFArrayAppendValue(cf_arr, cf_str)
++            finally:
++                CoreFoundation.CFRelease(cf_str)
++    except BaseException as e:
++        if cf_arr:
++            CoreFoundation.CFRelease(cf_arr)
++        raise ssl.SSLError("Unable to allocate array: %s" % (e,))
++    return cf_arr
++
++
+ def _cf_string_to_unicode(value):
+     """
+     Creates a Unicode string from a CFString object. Used entirely for error
+     reporting.
+ 
+     Yes, it annoys me quite a lot that this function is this complex.
+     """
+     value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p))
+@@ -321,8 +366,31 @@ def _load_client_cert_chain(keychain, *p
+             # ArrayAppendValue does a CFRetain on the item. That's fine,
+             # because the finally block will release our other refs to them.
+             CoreFoundation.CFArrayAppendValue(trust_chain, item)
+ 
+         return trust_chain
+     finally:
+         for obj in itertools.chain(identities, certificates):
+             CoreFoundation.CFRelease(obj)
++
++
++TLS_PROTOCOL_VERSIONS = {
++    "SSLv2": (0, 2),
++    "SSLv3": (3, 0),
++    "TLSv1": (3, 1),
++    "TLSv1.1": (3, 2),
++    "TLSv1.2": (3, 3),
++}
++
++
++def _build_tls_unknown_ca_alert(version):
++    """
++    Builds a TLS alert record for an unknown CA.
++    """
++    ver_maj, ver_min = TLS_PROTOCOL_VERSIONS[version]
++    severity_fatal = 0x02
++    description_unknown_ca = 0x30
++    msg = struct.pack(">BB", severity_fatal, description_unknown_ca)
++    msg_len = len(msg)
++    record_type_alert = 0x15
++    record = struct.pack(">BBBH", record_type_alert, ver_maj, ver_min, msg_len) + msg
++    return record
+diff --git a/third_party/python/urllib3/urllib3/contrib/appengine.py b/third_party/python/urllib3/urllib3/contrib/appengine.py
+--- a/third_party/python/urllib3/urllib3/contrib/appengine.py
++++ b/third_party/python/urllib3/urllib3/contrib/appengine.py
+@@ -34,34 +34,34 @@ 2. You can use a normal :class:`~urllib3
+             GAE_USE_SOCKETS_HTTPLIB : 'true'
+ 
+ 3. If you are using `App Engine Flexible
+ <https://cloud.google.com/appengine/docs/flexible/>`_, you can use the standard
+ :class:`PoolManager` without any configuration or special environment variables.
+ """
+ 
+ from __future__ import absolute_import
++
+ import io
+ import logging
+ import warnings
+-from ..packages.six.moves.urllib.parse import urljoin
+ 
+ from ..exceptions import (
+     HTTPError,
+     HTTPWarning,
+     MaxRetryError,
+     ProtocolError,
++    SSLError,
+     TimeoutError,
+-    SSLError,
+ )
+-
++from ..packages.six.moves.urllib.parse import urljoin
+ from ..request import RequestMethods
+ from ..response import HTTPResponse
++from ..util.retry import Retry
+ from ..util.timeout import Timeout
+-from ..util.retry import Retry
+ from . import _appengine_environ
+ 
+ try:
+     from google.appengine.api import urlfetch
+ except ImportError:
+     urlfetch = None
+ 
+ 
+@@ -85,17 +85,17 @@ class AppEngineManager(RequestMethods):
+     the App Engine documentation `here
+     <https://cloud.google.com/appengine/docs/python/urlfetch>`_.
+ 
+     Notably it will raise an :class:`AppEnginePlatformError` if:
+         * URLFetch is not available.
+         * If you attempt to use this on App Engine Flexible, as full socket
+           support is available.
+         * If a request size is more than 10 megabytes.
+-        * If a response size is more than 32 megabtyes.
++        * If a response size is more than 32 megabytes.
+         * If you use an unsupported request method such as OPTIONS.
+ 
+     Beyond those cases, it will raise normal urllib3 errors.
+     """
+ 
+     def __init__(
+         self,
+         headers=None,
+diff --git a/third_party/python/urllib3/urllib3/contrib/ntlmpool.py b/third_party/python/urllib3/urllib3/contrib/ntlmpool.py
+--- a/third_party/python/urllib3/urllib3/contrib/ntlmpool.py
++++ b/third_party/python/urllib3/urllib3/contrib/ntlmpool.py
+@@ -1,22 +1,22 @@
+ """
+ NTLM authenticating pool, contributed by erikcederstran
+ 
+ Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
+ """
+ from __future__ import absolute_import
+ 
+ from logging import getLogger
++
+ from ntlm import ntlm
+ 
+ from .. import HTTPSConnectionPool
+ from ..packages.six.moves.http_client import HTTPSConnection
+ 
+-
+ log = getLogger(__name__)
+ 
+ 
+ class NTLMConnectionPool(HTTPSConnectionPool):
+     """
+     Implements an NTLM authentication version of an urllib3 connection pool
+     """
+ 
+diff --git a/third_party/python/urllib3/urllib3/contrib/pyopenssl.py b/third_party/python/urllib3/urllib3/contrib/pyopenssl.py
+--- a/third_party/python/urllib3/urllib3/contrib/pyopenssl.py
++++ b/third_party/python/urllib3/urllib3/contrib/pyopenssl.py
+@@ -1,82 +1,86 @@
+ """
+-SSL with SNI_-support for Python 2. Follow these instructions if you would
+-like to verify SSL certificates in Python 2. Note, the default libraries do
++TLS with SNI_-support for Python 2. Follow these instructions if you would
++like to verify TLS certificates in Python 2. Note, the default libraries do
+ *not* do certificate checking; you need to do additional work to validate
+ certificates yourself.
+ 
+ This needs the following packages installed:
+ 
+-* pyOpenSSL (tested with 16.0.0)
+-* cryptography (minimum 1.3.4, from pyopenssl)
+-* idna (minimum 2.0, from cryptography)
++* `pyOpenSSL`_ (tested with 16.0.0)
++* `cryptography`_ (minimum 1.3.4, from pyopenssl)
++* `idna`_ (minimum 2.0, from cryptography)
+ 
+ However, pyopenssl depends on cryptography, which depends on idna, so while we
+ use all three directly here we end up having relatively few packages required.
+ 
+ You can install them with the following command:
+ 
+-    pip install pyopenssl cryptography idna
++.. code-block:: bash
++
++    $ python -m pip install pyopenssl cryptography idna
+ 
+ To activate certificate checking, call
+ :func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
+ before you begin making HTTP requests. This can be done in a ``sitecustomize``
+ module, or at any other time before your application begins using ``urllib3``,
+-like this::
++like this:
++
++.. code-block:: python
+ 
+     try:
+         import urllib3.contrib.pyopenssl
+         urllib3.contrib.pyopenssl.inject_into_urllib3()
+     except ImportError:
+         pass
+ 
+ Now you can use :mod:`urllib3` as you normally would, and it will support SNI
+ when the required modules are installed.
+ 
+ Activating this module also has the positive side effect of disabling SSL/TLS
+ compression in Python 2 (see `CRIME attack`_).
+ 
+-If you want to configure the default list of supported cipher suites, you can
+-set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.
+-
+ .. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
+ .. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
++.. _pyopenssl: https://www.pyopenssl.org
++.. _cryptography: https://cryptography.io
++.. _idna: https://github.com/kjd/idna
+ """
+ from __future__ import absolute_import
+ 
+ import OpenSSL.SSL
+ from cryptography import x509
+ from cryptography.hazmat.backends.openssl import backend as openssl_backend
+ from cryptography.hazmat.backends.openssl.x509 import _Certificate
+ 
+ try:
+     from cryptography.x509 import UnsupportedExtension
+ except ImportError:
+     # UnsupportedExtension is gone in cryptography >= 2.1.0
+     class UnsupportedExtension(Exception):
+         pass
+ 
+ 
+-from socket import timeout, error as SocketError
+ from io import BytesIO
++from socket import error as SocketError
++from socket import timeout
+ 
+ try:  # Platform-specific: Python 2
+     from socket import _fileobject
+ except ImportError:  # Platform-specific: Python 3
+     _fileobject = None
+     from ..packages.backports.makefile import backport_makefile
+ 
+ import logging
+ import ssl
+-from ..packages import six
+ import sys
+ 
+ from .. import util
+-
++from ..packages import six
+ 
+ __all__ = ["inject_into_urllib3", "extract_from_urllib3"]
+ 
+ # SNI always works.
+ HAS_SNI = True
+ 
+ # Map from urllib3 to PyOpenSSL compatible parameter-values.
+ _openssl_versions = {
+@@ -460,16 +464,20 @@ class PyOpenSSLContext(object):
+     def load_cert_chain(self, certfile, keyfile=None, password=None):
+         self._ctx.use_certificate_chain_file(certfile)
+         if password is not None:
+             if not isinstance(password, six.binary_type):
+                 password = password.encode("utf-8")
+             self._ctx.set_passwd_cb(lambda *_: password)
+         self._ctx.use_privatekey_file(keyfile or certfile)
+ 
++    def set_alpn_protocols(self, protocols):
++        protocols = [six.ensure_binary(p) for p in protocols]
++        return self._ctx.set_alpn_protos(protocols)
++
+     def wrap_socket(
+         self,
+         sock,
+         server_side=False,
+         do_handshake_on_connect=True,
+         suppress_ragged_eofs=True,
+         server_hostname=None,
+     ):
+diff --git a/third_party/python/urllib3/urllib3/contrib/securetransport.py b/third_party/python/urllib3/urllib3/contrib/securetransport.py
+--- a/third_party/python/urllib3/urllib3/contrib/securetransport.py
++++ b/third_party/python/urllib3/urllib3/contrib/securetransport.py
+@@ -24,16 +24,18 @@ To use this module, simply import and in
+ 
+ Happy TLSing!
+ 
+ This code is a bastardised version of the code found in Will Bond's oscrypto
+ library. An enormous debt is owed to him for blazing this trail for us. For
+ that reason, this code should be considered to be covered both by urllib3's
+ license and by oscrypto's:
+ 
++.. code-block::
++
+     Copyright (c) 2015-2016 Will Bond <will@wbond.net>
+ 
+     Permission is hereby granted, free of charge, to any person obtaining a
+     copy of this software and associated documentation files (the "Software"),
+     to deal in the Software without restriction, including without limitation
+     the rights to use, copy, modify, merge, publish, distribute, sublicense,
+     and/or sell copies of the Software, and to permit persons to whom the
+     Software is furnished to do so, subject to the following conditions:
+@@ -53,26 +55,31 @@ from __future__ import absolute_import
+ 
+ import contextlib
+ import ctypes
+ import errno
+ import os.path
+ import shutil
+ import socket
+ import ssl
++import struct
+ import threading
+ import weakref
+ 
++import six
++
+ from .. import util
+-from ._securetransport.bindings import Security, SecurityConst, CoreFoundation
++from ._securetransport.bindings import CoreFoundation, Security, SecurityConst
+ from ._securetransport.low_level import (
+     _assert_no_error,
++    _build_tls_unknown_ca_alert,
+     _cert_array_from_pem,
++    _create_cfstring_array,
++    _load_client_cert_chain,
+     _temporary_keychain,
+-    _load_client_cert_chain,
+ )
+ 
+ try:  # Platform-specific: Python 2
+     from socket import _fileobject
+ except ImportError:  # Platform-specific: Python 3
+     _fileobject = None
+     from ..packages.backports.makefile import backport_makefile
+ 
+@@ -369,26 +376,65 @@ class WrappedSocket(object):
+         OpenSSL cipher strings is going to be a freaking nightmare.
+         """
+         ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES)
+         result = Security.SSLSetEnabledCiphers(
+             self.context, ciphers, len(CIPHER_SUITES)
+         )
+         _assert_no_error(result)
+ 
++    def _set_alpn_protocols(self, protocols):
++        """
++        Sets up the ALPN protocols on the context.
++        """
++        if not protocols:
++            return
++        protocols_arr = _create_cfstring_array(protocols)
++        try:
++            result = Security.SSLSetALPNProtocols(self.context, protocols_arr)
++            _assert_no_error(result)
++        finally:
++            CoreFoundation.CFRelease(protocols_arr)
++
+     def _custom_validate(self, verify, trust_bundle):
+         """
+         Called when we have set custom validation. We do this in two cases:
+         first, when cert validation is entirely disabled; and second, when
+         using a custom trust DB.
++        Raises an SSLError if the connection is not trusted.
+         """
+         # If we disabled cert validation, just say: cool.
+         if not verify:
+             return
+ 
++        successes = (
++            SecurityConst.kSecTrustResultUnspecified,
++            SecurityConst.kSecTrustResultProceed,
++        )
++        try:
++            trust_result = self._evaluate_trust(trust_bundle)
++            if trust_result in successes:
++                return
++            reason = "error code: %d" % (trust_result,)
++        except Exception as e:
++            # Do not trust on error
++            reason = "exception: %r" % (e,)
++
++        # SecureTransport does not send an alert nor shuts down the connection.
++        rec = _build_tls_unknown_ca_alert(self.version())
++        self.socket.sendall(rec)
++        # close the connection immediately
++        # l_onoff = 1, activate linger
++        # l_linger = 0, linger for 0 seconds
++        opts = struct.pack("ii", 1, 0)
++        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts)
++        self.close()
++        raise ssl.SSLError("certificate verify failed, %s" % reason)
++
++    def _evaluate_trust(self, trust_bundle):
+         # We want data in memory, so load it up.
+         if os.path.isfile(trust_bundle):
+             with open(trust_bundle, "rb") as f:
+                 trust_bundle = f.read()
+ 
+         cert_array = None
+         trust = Security.SecTrustRef()
+ 
+@@ -416,36 +462,29 @@ class WrappedSocket(object):
+             _assert_no_error(result)
+         finally:
+             if trust:
+                 CoreFoundation.CFRelease(trust)
+ 
+             if cert_array is not None:
+                 CoreFoundation.CFRelease(cert_array)
+ 
+-        # Ok, now we can look at what the result was.
+-        successes = (
+-            SecurityConst.kSecTrustResultUnspecified,
+-            SecurityConst.kSecTrustResultProceed,
+-        )
+-        if trust_result.value not in successes:
+-            raise ssl.SSLError(
+-                "certificate verify failed, error code: %d" % trust_result.value
+-            )
++        return trust_result.value
+ 
+     def handshake(
+         self,
+         server_hostname,
+         verify,
+         trust_bundle,
+         min_version,
+         max_version,
+         client_cert,
+         client_key,
+         client_key_passphrase,
++        alpn_protocols,
+     ):
+         """
+         Actually performs the TLS handshake. This is run automatically by
+         wrapped socket, and shouldn't be needed in user code.
+         """
+         # First, we do the initial bits of connection setup. We need to create
+         # a context, set its I/O funcs, and set the connection reference.
+         self.context = Security.SSLCreateContext(
+@@ -476,16 +515,19 @@ class WrappedSocket(object):
+             result = Security.SSLSetPeerDomainName(
+                 self.context, server_hostname, len(server_hostname)
+             )
+             _assert_no_error(result)
+ 
+         # Setup the ciphers.
+         self._set_ciphers()
+ 
++        # Setup the ALPN protocols.
++        self._set_alpn_protocols(alpn_protocols)
++
+         # Set the minimum and maximum TLS versions.
+         result = Security.SSLSetProtocolVersionMin(self.context, min_version)
+         _assert_no_error(result)
+ 
+         result = Security.SSLSetProtocolVersionMax(self.context, max_version)
+         _assert_no_error(result)
+ 
+         # If there's a trust DB, we need to use it. We do that by telling
+@@ -749,16 +791,17 @@ class SecureTransportContext(object):
+     def __init__(self, protocol):
+         self._min_version, self._max_version = _protocol_to_min_max[protocol]
+         self._options = 0
+         self._verify = False
+         self._trust_bundle = None
+         self._client_cert = None
+         self._client_key = None
+         self._client_key_passphrase = None
++        self._alpn_protocols = None
+ 
+     @property
+     def check_hostname(self):
+         """
+         SecureTransport cannot have its hostname checking disabled. For more,
+         see the comment on getpeercert() in this file.
+         """
+         return True
+@@ -826,16 +869,28 @@ class SecureTransportContext(object):
+ 
+         self._trust_bundle = cafile or cadata
+ 
+     def load_cert_chain(self, certfile, keyfile=None, password=None):
+         self._client_cert = certfile
+         self._client_key = keyfile
+         self._client_cert_passphrase = password
+ 
++    def set_alpn_protocols(self, protocols):
++        """
++        Sets the ALPN protocols that will later be set on the context.
++
++        Raises a NotImplementedError if ALPN is not supported.
++        """
++        if not hasattr(Security, "SSLSetALPNProtocols"):
++            raise NotImplementedError(
++                "SecureTransport supports ALPN only in macOS 10.12+"
++            )
++        self._alpn_protocols = [six.ensure_binary(p) for p in protocols]
++
+     def wrap_socket(
+         self,
+         sock,
+         server_side=False,
+         do_handshake_on_connect=True,
+         suppress_ragged_eofs=True,
+         server_hostname=None,
+     ):
+@@ -855,10 +910,11 @@ class SecureTransportContext(object):
+             server_hostname,
+             self._verify,
+             self._trust_bundle,
+             self._min_version,
+             self._max_version,
+             self._client_cert,
+             self._client_key,
+             self._client_key_passphrase,
++            self._alpn_protocols,
+         )
+         return wrapped_socket
+diff --git a/third_party/python/urllib3/urllib3/contrib/socks.py b/third_party/python/urllib3/urllib3/contrib/socks.py
+--- a/third_party/python/urllib3/urllib3/contrib/socks.py
++++ b/third_party/python/urllib3/urllib3/contrib/socks.py
+@@ -9,55 +9,61 @@ The SOCKS implementation supports the fu
+ supports the following SOCKS features:
+ 
+ - SOCKS4A (``proxy_url='socks4a://...``)
+ - SOCKS4 (``proxy_url='socks4://...``)
+ - SOCKS5 with remote DNS (``proxy_url='socks5h://...``)
+ - SOCKS5 with local DNS (``proxy_url='socks5://...``)
+ - Usernames and passwords for the SOCKS proxy
+ 
+- .. note::
+-    It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
+-    your ``proxy_url`` to ensure that DNS resolution is done from the remote
+-    server instead of client-side when connecting to a domain name.
++.. note::
++   It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
++   your ``proxy_url`` to ensure that DNS resolution is done from the remote
++   server instead of client-side when connecting to a domain name.
+ 
+ SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5
+ supports IPv4, IPv6, and domain names.
+ 
+ When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url``
+-will be sent as the ``userid`` section of the SOCKS request::
++will be sent as the ``userid`` section of the SOCKS request:
++
++.. code-block:: python
+ 
+     proxy_url="socks4a://<userid>@proxy-host"
+ 
+ When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion
+ of the ``proxy_url`` will be sent as the username/password to authenticate
+-with the proxy::
++with the proxy:
++
++.. code-block:: python
+ 
+     proxy_url="socks5h://<username>:<password>@proxy-host"
+ 
+ """
+ from __future__ import absolute_import
+ 
+ try:
+     import socks
+ except ImportError:
+     import warnings
++
+     from ..exceptions import DependencyWarning
+ 
+     warnings.warn(
+         (
+             "SOCKS support in urllib3 requires the installation of optional "
+             "dependencies: specifically, PySocks.  For more information, see "
+             "https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies"
+         ),
+         DependencyWarning,
+     )
+     raise
+ 
+-from socket import error as SocketError, timeout as SocketTimeout
++from socket import error as SocketError
++from socket import timeout as SocketTimeout
+ 
+ from ..connection import HTTPConnection, HTTPSConnection
+ from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool
+ from ..exceptions import ConnectTimeoutError, NewConnectionError
+ from ..poolmanager import PoolManager
+ from ..util.url import parse_url
+ 
+ try:
+diff --git a/third_party/python/urllib3/urllib3/exceptions.py b/third_party/python/urllib3/urllib3/exceptions.py
+--- a/third_party/python/urllib3/urllib3/exceptions.py
++++ b/third_party/python/urllib3/urllib3/exceptions.py
+@@ -1,68 +1,74 @@
+ from __future__ import absolute_import
++
+ from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead
+ 
+ # Base Exceptions
+ 
+ 
+ class HTTPError(Exception):
+-    "Base exception used by this module."
++    """Base exception used by this module."""
++
+     pass
+ 
+ 
+ class HTTPWarning(Warning):
+-    "Base warning used by this module."
++    """Base warning used by this module."""
++
+     pass
+ 
+ 
+ class PoolError(HTTPError):
+-    "Base exception for errors caused within a pool."
++    """Base exception for errors caused within a pool."""
+ 
+     def __init__(self, pool, message):
+         self.pool = pool
+         HTTPError.__init__(self, "%s: %s" % (pool, message))
+ 
+     def __reduce__(self):
+         # For pickling purposes.
+         return self.__class__, (None, None)
+ 
+ 
+ class RequestError(PoolError):
+-    "Base exception for PoolErrors that have associated URLs."
++    """Base exception for PoolErrors that have associated URLs."""
+ 
+     def __init__(self, pool, url, message):
+         self.url = url
+         PoolError.__init__(self, pool, message)
+ 
+     def __reduce__(self):
+         # For pickling purposes.
+         return self.__class__, (None, self.url, None)
+ 
+ 
+ class SSLError(HTTPError):
+-    "Raised when SSL certificate fails in an HTTPS connection."
++    """Raised when SSL certificate fails in an HTTPS connection."""
++
+     pass
+ 
+ 
+ class ProxyError(HTTPError):
+-    "Raised when the connection to a proxy fails."
++    """Raised when the connection to a proxy fails."""
+ 
+     def __init__(self, message, error, *args):
+         super(ProxyError, self).__init__(message, error, *args)
+         self.original_error = error
+ 
+ 
+ class DecodeError(HTTPError):
+-    "Raised when automatic decoding based on Content-Type fails."
++    """Raised when automatic decoding based on Content-Type fails."""
++
+     pass
+ 
+ 
+ class ProtocolError(HTTPError):
+-    "Raised when something unexpected happens mid-request/response."
++    """Raised when something unexpected happens mid-request/response."""
++
+     pass
+ 
+ 
+ #: Renamed to ProtocolError but aliased for backwards compatibility.
+ ConnectionError = ProtocolError
+ 
+ 
+ # Leaf Exceptions
+@@ -82,191 +88,226 @@ class MaxRetryError(RequestError):
+         self.reason = reason
+ 
+         message = "Max retries exceeded with url: %s (Caused by %r)" % (url, reason)
+ 
+         RequestError.__init__(self, pool, url, message)
+ 
+ 
+ class HostChangedError(RequestError):
+-    "Raised when an existing pool gets a request for a foreign host."
++    """Raised when an existing pool gets a request for a foreign host."""
+ 
+     def __init__(self, pool, url, retries=3):
+         message = "Tried to open a foreign host with url: %s" % url
+         RequestError.__init__(self, pool, url, message)
+         self.retries = retries
+ 
+ 
+ class TimeoutStateError(HTTPError):
+-    """ Raised when passing an invalid state to a timeout """
++    """Raised when passing an invalid state to a timeout"""
+ 
+     pass
+ 
+ 
+ class TimeoutError(HTTPError):
+-    """ Raised when a socket timeout error occurs.
++    """Raised when a socket timeout error occurs.
+ 
+     Catching this error will catch both :exc:`ReadTimeoutErrors
+     <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
+     """
+ 
+     pass
+ 
+ 
+ class ReadTimeoutError(TimeoutError, RequestError):
+-    "Raised when a socket timeout occurs while receiving data from a server"
++    """Raised when a socket timeout occurs while receiving data from a server"""
++
+     pass
+ 
+ 
+ # This timeout error does not have a URL attached and needs to inherit from the
+ # base HTTPError
+ class ConnectTimeoutError(TimeoutError):
+-    "Raised when a socket timeout occurs while connecting to a server"
++    """Raised when a socket timeout occurs while connecting to a server"""
++
+     pass
+ 
+ 
+ class NewConnectionError(ConnectTimeoutError, PoolError):
+-    "Raised when we fail to establish a new connection. Usually ECONNREFUSED."
++    """Raised when we fail to establish a new connection. Usually ECONNREFUSED."""
++
+     pass
+ 
+ 
+ class EmptyPoolError(PoolError):
+-    "Raised when a pool runs out of connections and no more are allowed."
++    """Raised when a pool runs out of connections and no more are allowed."""
++
+     pass
+ 
+ 
+ class ClosedPoolError(PoolError):
+-    "Raised when a request enters a pool after the pool has been closed."
++    """Raised when a request enters a pool after the pool has been closed."""
++
+     pass
+ 
+ 
+ class LocationValueError(ValueError, HTTPError):
+-    "Raised when there is something wrong with a given URL input."
++    """Raised when there is something wrong with a given URL input."""
++
+     pass
+ 
+ 
+ class LocationParseError(LocationValueError):
+-    "Raised when get_host or similar fails to parse the URL input."
++    """Raised when get_host or similar fails to parse the URL input."""
+ 
+     def __init__(self, location):
+         message = "Failed to parse: %s" % location
+         HTTPError.__init__(self, message)
+ 
+         self.location = location
+ 
+ 
++class URLSchemeUnknown(LocationValueError):
++    """Raised when a URL input has an unsupported scheme."""
++
++    def __init__(self, scheme):
++        message = "Not supported URL scheme %s" % scheme
++        super(URLSchemeUnknown, self).__init__(message)
++
++        self.scheme = scheme
++
++
+ class ResponseError(HTTPError):
+-    "Used as a container for an error reason supplied in a MaxRetryError."
++    """Used as a container for an error reason supplied in a MaxRetryError."""
++
+     GENERIC_ERROR = "too many error responses"
+     SPECIFIC_ERROR = "too many {status_code} error responses"
+ 
+ 
+ class SecurityWarning(HTTPWarning):
+-    "Warned when performing security reducing actions"
++    """Warned when performing security reducing actions"""
++
+     pass
+ 
+ 
+ class SubjectAltNameWarning(SecurityWarning):
+-    "Warned when connecting to a host with a certificate missing a SAN."
++    """Warned when connecting to a host with a certificate missing a SAN."""
++
+     pass
+ 
+ 
+ class InsecureRequestWarning(SecurityWarning):
+-    "Warned when making an unverified HTTPS request."
++    """Warned when making an unverified HTTPS request."""
++
+     pass
+ 
+ 
+ class SystemTimeWarning(SecurityWarning):
+-    "Warned when system time is suspected to be wrong"
++    """Warned when system time is suspected to be wrong"""
++
+     pass
+ 
+ 
+ class InsecurePlatformWarning(SecurityWarning):
+-    "Warned when certain SSL configuration is not available on a platform."
++    """Warned when certain TLS/SSL configuration is not available on a platform."""
++
+     pass
+ 
+ 
+ class SNIMissingWarning(HTTPWarning):
+-    "Warned when making a HTTPS request without SNI available."
++    """Warned when making a HTTPS request without SNI available."""
++
+     pass
+ 
+ 
+ class DependencyWarning(HTTPWarning):
+     """
+     Warned when an attempt is made to import a module with missing optional
+     dependencies.
+     """
+ 
+     pass
+ 
+ 
+-class InvalidProxyConfigurationWarning(HTTPWarning):
+-    """
+-    Warned when using an HTTPS proxy and an HTTPS URL. Currently
+-    urllib3 doesn't support HTTPS proxies and the proxy will be
+-    contacted via HTTP instead. This warning can be fixed by
+-    changing your HTTPS proxy URL into an HTTP proxy URL.
++class ResponseNotChunked(ProtocolError, ValueError):
++    """Response needs to be chunked in order to read it as chunks."""
+ 
+-    If you encounter this warning read this:
+-    https://github.com/urllib3/urllib3/issues/1850
+-    """
+-
+-    pass
+-
+-
+-class ResponseNotChunked(ProtocolError, ValueError):
+-    "Response needs to be chunked in order to read it as chunks."
+     pass
+ 
+ 
+ class BodyNotHttplibCompatible(HTTPError):
+     """
+-    Body should be httplib.HTTPResponse like (have an fp attribute which
+-    returns raw chunks) for read_chunked().
++    Body should be :class:`http.client.HTTPResponse` like
++    (have an fp attribute which returns raw chunks) for read_chunked().
+     """
+ 
+     pass
+ 
+ 
+ class IncompleteRead(HTTPError, httplib_IncompleteRead):
+     """
+     Response length doesn't match expected Content-Length
+ 
+-    Subclass of http_client.IncompleteRead to allow int value
+-    for `partial` to avoid creating large objects on streamed
+-    reads.
++    Subclass of :class:`http.client.IncompleteRead` to allow int value
++    for ``partial`` to avoid creating large objects on streamed reads.
+     """
+ 
+     def __init__(self, partial, expected):
+         super(IncompleteRead, self).__init__(partial, expected)
+ 
+     def __repr__(self):
+         return "IncompleteRead(%i bytes read, %i more expected)" % (
+             self.partial,
+             self.expected,
+         )
+ 
+ 
++class InvalidChunkLength(HTTPError, httplib_IncompleteRead):
++    """Invalid chunk length in a chunked response."""
++
++    def __init__(self, response, length):
++        super(InvalidChunkLength, self).__init__(
++            response.tell(), response.length_remaining
++        )
++        self.response = response
++        self.length = length
++
++    def __repr__(self):
++        return "InvalidChunkLength(got length %r, %i bytes read)" % (
++            self.length,
++            self.partial,
++        )
++
++
+ class InvalidHeader(HTTPError):
+-    "The header provided was somehow invalid."
++    """The header provided was somehow invalid."""
++
+     pass
+ 
+ 
+-class ProxySchemeUnknown(AssertionError, ValueError):
+-    "ProxyManager does not support the supplied scheme"
++class ProxySchemeUnknown(AssertionError, URLSchemeUnknown):
++    """ProxyManager does not support the supplied scheme"""
++
+     # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
+ 
+     def __init__(self, scheme):
+         message = "Not supported proxy scheme %s" % scheme
+         super(ProxySchemeUnknown, self).__init__(message)
+ 
+ 
++class ProxySchemeUnsupported(ValueError):
++    """Fetching HTTPS resources through HTTPS proxies is unsupported"""
++
++    pass
++
++
+ class HeaderParsingError(HTTPError):
+-    "Raised by assert_header_parsing, but we convert it to a log.warning statement."
++    """Raised by assert_header_parsing, but we convert it to a log.warning statement."""
+ 
+     def __init__(self, defects, unparsed_data):
+         message = "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data)
+         super(HeaderParsingError, self).__init__(message)
+ 
+ 
+ class UnrewindableBodyError(HTTPError):
+-    "urllib3 encountered an error when trying to rewind a body"
++    """urllib3 encountered an error when trying to rewind a body"""
++
+     pass
+diff --git a/third_party/python/urllib3/urllib3/fields.py b/third_party/python/urllib3/urllib3/fields.py
+--- a/third_party/python/urllib3/urllib3/fields.py
++++ b/third_party/python/urllib3/urllib3/fields.py
+@@ -1,9 +1,10 @@
+ from __future__ import absolute_import
++
+ import email.utils
+ import mimetypes
+ import re
+ 
+ from .packages import six
+ 
+ 
+ def guess_content_type(filename, default="application/octet-stream"):
+@@ -21,17 +22,18 @@ def guess_content_type(filename, default
+ 
+ 
+ def format_header_param_rfc2231(name, value):
+     """
+     Helper function to format and quote a single header parameter using the
+     strategy defined in RFC 2231.
+ 
+     Particularly useful for header parameters which might contain
+-    non-ASCII values, like file names. This follows RFC 2388 Section 4.4.
++    non-ASCII values, like file names. This follows
++    `RFC 2388 Section 4.4 <https://tools.ietf.org/html/rfc2388#section-4.4>`_.
+ 
+     :param name:
+         The name of the parameter, a string expected to be ASCII only.
+     :param value:
+         The value of the parameter, provided as ``bytes`` or `str``.
+     :ret:
+         An RFC-2231-formatted unicode string.
+     """
+@@ -60,17 +62,16 @@ def format_header_param_rfc2231(name, va
+ 
+     return value
+ 
+ 
+ _HTML5_REPLACEMENTS = {
+     u"\u0022": u"%22",
+     # Replace "\" with "\\".
+     u"\u005C": u"\u005C\u005C",
+-    u"\u005C": u"\u005C\u005C",
+ }
+ 
+ # All control characters from 0x00 to 0x1F *except* 0x1B.
+ _HTML5_REPLACEMENTS.update(
+     {
+         six.unichr(cc): u"%{:02X}".format(cc)
+         for cc in range(0x00, 0x1F + 1)
+         if cc not in (0x1B,)
+diff --git a/third_party/python/urllib3/urllib3/filepost.py b/third_party/python/urllib3/urllib3/filepost.py
+--- a/third_party/python/urllib3/urllib3/filepost.py
++++ b/third_party/python/urllib3/urllib3/filepost.py
+@@ -1,18 +1,18 @@
+ from __future__ import absolute_import
++
+ import binascii
+ import codecs
+ import os
+-
+ from io import BytesIO
+ 
++from .fields import RequestField
+ from .packages import six
+ from .packages.six import b
+-from .fields import RequestField
+ 
+ writer = codecs.lookup("utf-8")[3]
+ 
+ 
+ def choose_boundary():
+     """
+     Our embarrassingly-simple replacement for mimetools.choose_boundary.
+     """
+diff --git a/third_party/python/urllib3/urllib3/packages/backports/makefile.py b/third_party/python/urllib3/urllib3/packages/backports/makefile.py
+--- a/third_party/python/urllib3/urllib3/packages/backports/makefile.py
++++ b/third_party/python/urllib3/urllib3/packages/backports/makefile.py
+@@ -2,17 +2,16 @@
+ """
+ backports.makefile
+ ~~~~~~~~~~~~~~~~~~
+ 
+ Backports the Python 3 ``socket.makefile`` method for use with anything that
+ wants to create a "fake" socket object.
+ """
+ import io
+-
+ from socket import SocketIO
+ 
+ 
+ def backport_makefile(
+     self, mode="r", buffering=None, encoding=None, errors=None, newline=None
+ ):
+     """
+     Backport of ``socket.makefile`` from Python 3.5.
+diff --git a/third_party/python/urllib3/urllib3/packages/ssl_match_hostname/__init__.py b/third_party/python/urllib3/urllib3/packages/ssl_match_hostname/__init__.py
+--- a/third_party/python/urllib3/urllib3/packages/ssl_match_hostname/__init__.py
++++ b/third_party/python/urllib3/urllib3/packages/ssl_match_hostname/__init__.py
+@@ -5,15 +5,18 @@ try:
+     # import the match_hostname function if it's at least that good.
+     if sys.version_info < (3, 5):
+         raise ImportError("Fallback to vendored code")
+ 
+     from ssl import CertificateError, match_hostname
+ except ImportError:
+     try:
+         # Backport of the function from a pypi module
+-        from backports.ssl_match_hostname import CertificateError, match_hostname
++        from backports.ssl_match_hostname import (  # type: ignore
++            CertificateError,
++            match_hostname,
++        )
+     except ImportError:
+         # Our vendored copy
+-        from ._implementation import CertificateError, match_hostname
++        from ._implementation import CertificateError, match_hostname  # type: ignore
+ 
+ # Not needed, but documenting what we provide.
+ __all__ = ("CertificateError", "match_hostname")
+diff --git a/third_party/python/urllib3/urllib3/poolmanager.py b/third_party/python/urllib3/urllib3/poolmanager.py
+--- a/third_party/python/urllib3/urllib3/poolmanager.py
++++ b/third_party/python/urllib3/urllib3/poolmanager.py
+@@ -1,29 +1,29 @@
+ from __future__ import absolute_import
++
+ import collections
+ import functools
+ import logging
+-import warnings
+ 
+ from ._collections import RecentlyUsedContainer
+-from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
+-from .connectionpool import port_by_scheme
++from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme
+ from .exceptions import (
+     LocationValueError,
+     MaxRetryError,
+     ProxySchemeUnknown,
+-    InvalidProxyConfigurationWarning,
++    ProxySchemeUnsupported,
++    URLSchemeUnknown,
+ )
+ from .packages import six
+ from .packages.six.moves.urllib.parse import urljoin
+ from .request import RequestMethods
+-from .util.url import parse_url
++from .util.proxy import connection_requires_http_tunnel
+ from .util.retry import Retry
+-
++from .util.url import parse_url
+ 
+ __all__ = ["PoolManager", "ProxyManager", "proxy_from_url"]
+ 
+ 
+ log = logging.getLogger(__name__)
+ 
+ SSL_KEYWORDS = (
+     "key_file",
+@@ -54,27 +54,31 @@ SSL_KEYWORDS = (
+     "key_ca_certs",  # str
+     "key_ssl_version",  # str
+     "key_ca_cert_dir",  # str
+     "key_ssl_context",  # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext
+     "key_maxsize",  # int
+     "key_headers",  # dict
+     "key__proxy",  # parsed proxy url
+     "key__proxy_headers",  # dict
++    "key__proxy_config",  # class
+     "key_socket_options",  # list of (level (int), optname (int), value (int or str)) tuples
+     "key__socks_options",  # dict
+     "key_assert_hostname",  # bool or string
+     "key_assert_fingerprint",  # str
+     "key_server_hostname",  # str
+ )
+ 
+ #: The namedtuple class used to construct keys for the connection pool.
+ #: All custom key schemes should include the fields in this key at a minimum.
+ PoolKey = collections.namedtuple("PoolKey", _key_fields)
+ 
++_proxy_config_fields = ("ssl_context", "use_forwarding_for_https")
++ProxyConfig = collections.namedtuple("ProxyConfig", _proxy_config_fields)
++
+ 
+ def _default_key_normalizer(key_class, request_context):
+     """
+     Create a pool key out of a request context dictionary.
+ 
+     According to RFC 3986, both the scheme and host are case-insensitive.
+     Therefore, this function normalizes both before constructing the pool
+     key for an HTTPS request. If you wish to change this behaviour, provide
+@@ -156,16 +160,17 @@ class PoolManager(RequestMethods):
+         >>> r = manager.request('GET', 'http://google.com/mail')
+         >>> r = manager.request('GET', 'http://yahoo.com/')
+         >>> len(manager.pools)
+         2
+ 
+     """
+ 
+     proxy = None
++    proxy_config = None
+ 
+     def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
+         RequestMethods.__init__(self, headers)
+         self.connection_pool_kw = connection_pool_kw
+         self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close())
+ 
+         # Locally set the pool classes and keys so other PoolManagers can
+         # override them.
+@@ -177,17 +182,17 @@ class PoolManager(RequestMethods):
+ 
+     def __exit__(self, exc_type, exc_val, exc_tb):
+         self.clear()
+         # Return False to re-raise any potential exceptions
+         return False
+ 
+     def _new_pool(self, scheme, host, port, request_context=None):
+         """
+-        Create a new :class:`ConnectionPool` based on host, port, scheme, and
++        Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and
+         any additional pool keyword arguments.
+ 
+         If ``request_context`` is provided, it is provided as keyword arguments
+         to the pool class used. This method is used to actually create the
+         connection pools handed out by :meth:`connection_from_url` and
+         companion methods. It is intended to be overridden for customization.
+         """
+         pool_cls = self.pool_classes_by_scheme[scheme]
+@@ -213,17 +218,17 @@ class PoolManager(RequestMethods):
+ 
+         This will not affect in-flight connections, but they will not be
+         re-used after completion.
+         """
+         self.pools.clear()
+ 
+     def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
+         """
+-        Get a :class:`ConnectionPool` based on the host, port, and scheme.
++        Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme.
+ 
+         If ``port`` isn't given, it will be derived from the ``scheme`` using
+         ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is
+         provided, it is merged with the instance's ``connection_pool_kw``
+         variable and used to create the new connection pool, if one is
+         needed.
+         """
+ 
+@@ -236,30 +241,32 @@ class PoolManager(RequestMethods):
+             port = port_by_scheme.get(request_context["scheme"].lower(), 80)
+         request_context["port"] = port
+         request_context["host"] = host
+ 
+         return self.connection_from_context(request_context)
+ 
+     def connection_from_context(self, request_context):
+         """
+-        Get a :class:`ConnectionPool` based on the request context.
++        Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context.
+ 
+         ``request_context`` must at least contain the ``scheme`` key and its
+         value must be a key in ``key_fn_by_scheme`` instance variable.
+         """
+         scheme = request_context["scheme"].lower()
+-        pool_key_constructor = self.key_fn_by_scheme[scheme]
++        pool_key_constructor = self.key_fn_by_scheme.get(scheme)
++        if not pool_key_constructor:
++            raise URLSchemeUnknown(scheme)
+         pool_key = pool_key_constructor(request_context)
+ 
+         return self.connection_from_pool_key(pool_key, request_context=request_context)
+ 
+     def connection_from_pool_key(self, pool_key, request_context=None):
+         """
+-        Get a :class:`ConnectionPool` based on the provided pool key.
++        Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key.
+ 
+         ``pool_key`` should be a namedtuple that only contains immutable
+         objects. At a minimum it must have the ``scheme``, ``host``, and
+         ``port`` fields.
+         """
+         with self.pools.lock:
+             # If the scheme, host, or port doesn't match existing open
+             # connections, open a new ConnectionPool.
+@@ -307,35 +314,67 @@ class PoolManager(RequestMethods):
+                     try:
+                         del base_pool_kwargs[key]
+                     except KeyError:
+                         pass
+                 else:
+                     base_pool_kwargs[key] = value
+         return base_pool_kwargs
+ 
++    def _proxy_requires_url_absolute_form(self, parsed_url):
++        """
++        Indicates if the proxy requires the complete destination URL in the
++        request.  Normally this is only needed when not using an HTTP CONNECT
++        tunnel.
++        """
++        if self.proxy is None:
++            return False
++
++        return not connection_requires_http_tunnel(
++            self.proxy, self.proxy_config, parsed_url.scheme
++        )
++
++    def _validate_proxy_scheme_url_selection(self, url_scheme):
++        """
++        Validates that were not attempting to do TLS in TLS connections on
++        Python2 or with unsupported SSL implementations.
++        """
++        if self.proxy is None or url_scheme != "https":
++            return
++
++        if self.proxy.scheme != "https":
++            return
++
++        if six.PY2 and not self.proxy_config.use_forwarding_for_https:
++            raise ProxySchemeUnsupported(
++                "Contacting HTTPS destinations through HTTPS proxies "
++                "'via CONNECT tunnels' is not supported in Python 2"
++            )
++
+     def urlopen(self, method, url, redirect=True, **kw):
+         """
+-        Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`
++        Same as :meth:`urllib3.HTTPConnectionPool.urlopen`
+         with custom cross-host redirect logic and only sends the request-uri
+         portion of the ``url``.
+ 
+         The given ``url`` parameter must be absolute, such that an appropriate
+         :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
+         """
+         u = parse_url(url)
++        self._validate_proxy_scheme_url_selection(u.scheme)
++
+         conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme)
+ 
+         kw["assert_same_host"] = False
+         kw["redirect"] = False
+ 
+         if "headers" not in kw:
+             kw["headers"] = self.headers.copy()
+ 
+-        if self.proxy is not None and u.scheme == "http":
++        if self._proxy_requires_url_absolute_form(u):
+             response = conn.urlopen(method, url, **kw)
+         else:
+             response = conn.urlopen(method, u.request_uri, **kw)
+ 
+         redirect_location = redirect and response.get_redirect_location()
+         if not redirect_location:
+             return response
+ 
+@@ -387,16 +426,29 @@ class ProxyManager(PoolManager):
+         The URL of the proxy to be used.
+ 
+     :param proxy_headers:
+         A dictionary containing headers that will be sent to the proxy. In case
+         of HTTP they are being sent with each request, while in the
+         HTTPS/CONNECT case they are sent only once. Could be used for proxy
+         authentication.
+ 
++    :param proxy_ssl_context:
++        The proxy SSL context is used to establish the TLS connection to the
++        proxy when using HTTPS proxies.
++
++    :param use_forwarding_for_https:
++        (Defaults to False) If set to True will forward requests to the HTTPS
++        proxy to be made on behalf of the client instead of creating a TLS
++        tunnel via the CONNECT method. **Enabling this flag means that request
++        and response headers and content will be visible from the HTTPS proxy**
++        whereas tunneling keeps request and response headers and content
++        private.  IP address, target hostname, SNI, and port are always visible
++        to an HTTPS proxy even when this flag is disabled.
++
+     Example:
+         >>> proxy = urllib3.ProxyManager('http://localhost:3128/')
+         >>> r1 = proxy.request('GET', 'http://google.com/')
+         >>> r2 = proxy.request('GET', 'http://httpbin.org/')
+         >>> len(proxy.pools)
+         1
+         >>> r3 = proxy.request('GET', 'https://httpbin.org/')
+         >>> r4 = proxy.request('GET', 'https://twitter.com/')
+@@ -406,38 +458,44 @@ class ProxyManager(PoolManager):
+     """
+ 
+     def __init__(
+         self,
+         proxy_url,
+         num_pools=10,
+         headers=None,
+         proxy_headers=None,
++        proxy_ssl_context=None,
++        use_forwarding_for_https=False,
+         **connection_pool_kw
+     ):
+ 
+         if isinstance(proxy_url, HTTPConnectionPool):
+             proxy_url = "%s://%s:%i" % (
+                 proxy_url.scheme,
+                 proxy_url.host,
+                 proxy_url.port,
+             )
+         proxy = parse_url(proxy_url)
++
++        if proxy.scheme not in ("http", "https"):
++            raise ProxySchemeUnknown(proxy.scheme)
++
+         if not proxy.port:
+             port = port_by_scheme.get(proxy.scheme, 80)
+             proxy = proxy._replace(port=port)
+ 
+-        if proxy.scheme not in ("http", "https"):
+-            raise ProxySchemeUnknown(proxy.scheme)
+-
+         self.proxy = proxy
+         self.proxy_headers = proxy_headers or {}
++        self.proxy_ssl_context = proxy_ssl_context
++        self.proxy_config = ProxyConfig(proxy_ssl_context, use_forwarding_for_https)
+ 
+         connection_pool_kw["_proxy"] = self.proxy
+         connection_pool_kw["_proxy_headers"] = self.proxy_headers
++        connection_pool_kw["_proxy_config"] = self.proxy_config
+ 
+         super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw)
+ 
+     def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None):
+         if scheme == "https":
+             return super(ProxyManager, self).connection_from_host(
+                 host, port, scheme, pool_kwargs=pool_kwargs
+             )
+@@ -456,37 +514,23 @@ class ProxyManager(PoolManager):
+         netloc = parse_url(url).netloc
+         if netloc:
+             headers_["Host"] = netloc
+ 
+         if headers:
+             headers_.update(headers)
+         return headers_
+ 
+-    def _validate_proxy_scheme_url_selection(self, url_scheme):
+-        if url_scheme == "https" and self.proxy.scheme == "https":
+-            warnings.warn(
+-                "Your proxy configuration specified an HTTPS scheme for the proxy. "
+-                "Are you sure you want to use HTTPS to contact the proxy? "
+-                "This most likely indicates an error in your configuration. "
+-                "Read this issue for more info: "
+-                "https://github.com/urllib3/urllib3/issues/1850",
+-                InvalidProxyConfigurationWarning,
+-                stacklevel=3,
+-            )
+-
+     def urlopen(self, method, url, redirect=True, **kw):
+         "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
+         u = parse_url(url)
+-        self._validate_proxy_scheme_url_selection(u.scheme)
+-
+-        if u.scheme == "http":
+-            # For proxied HTTPS requests, httplib sets the necessary headers
+-            # on the CONNECT to the proxy. For HTTP, we'll definitely
+-            # need to set 'Host' at the very least.
++        if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme):
++            # For connections using HTTP CONNECT, httplib sets the necessary
++            # headers on the CONNECT to the proxy. If we're not using CONNECT,
++            # we'll definitely need to set 'Host' at the very least.
+             headers = kw.get("headers", self.headers)
+             kw["headers"] = self._set_proxy_headers(url, headers)
+ 
+         return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw)
+ 
+ 
+ def proxy_from_url(url, **kw):
+     return ProxyManager(proxy_url=url, **kw)
+diff --git a/third_party/python/urllib3/urllib3/request.py b/third_party/python/urllib3/urllib3/request.py
+--- a/third_party/python/urllib3/urllib3/request.py
++++ b/third_party/python/urllib3/urllib3/request.py
+@@ -1,22 +1,21 @@
+ from __future__ import absolute_import
+ 
+ from .filepost import encode_multipart_formdata
+ from .packages.six.moves.urllib.parse import urlencode
+ 
+-
+ __all__ = ["RequestMethods"]
+ 
+ 
+ class RequestMethods(object):
+     """
+     Convenience mixin for classes who implement a :meth:`urlopen` method, such
+-    as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
+-    :class:`~urllib3.poolmanager.PoolManager`.
++    as :class:`urllib3.HTTPConnectionPool` and
++    :class:`urllib3.PoolManager`.
+ 
+     Provides behavior for making common types of HTTP request methods and
+     decides which type of request field encoding to use.
+ 
+     Specifically,
+ 
+     :meth:`.request_encode_url` is for sending requests whose fields are
+     encoded in the URL (such as GET, HEAD, DELETE).
+@@ -106,19 +105,19 @@ class RequestMethods(object):
+         multipart_boundary=None,
+         **urlopen_kw
+     ):
+         """
+         Make a request using :meth:`urlopen` with the ``fields`` encoded in
+         the body. This is useful for request methods like POST, PUT, PATCH, etc.
+ 
+         When ``encode_multipart=True`` (default), then
+-        :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode
++        :func:`urllib3.encode_multipart_formdata` is used to encode
+         the payload with the appropriate content type. Otherwise
+-        :meth:`urllib.urlencode` is used with the
++        :func:`urllib.parse.urlencode` is used with the
+         'application/x-www-form-urlencoded' content type.
+ 
+         Multipart encoding must be used when posting files, and it's reasonably
+         safe to use it in other times too. However, it may break request
+         signing, such as with OAuth.
+ 
+         Supports an optional ``fields`` parameter of key/value strings AND
+         key/filetuple. A filetuple is a (filename, data, MIME type) tuple where
+diff --git a/third_party/python/urllib3/urllib3/response.py b/third_party/python/urllib3/urllib3/response.py
+--- a/third_party/python/urllib3/urllib3/response.py
++++ b/third_party/python/urllib3/urllib3/response.py
+@@ -1,35 +1,37 @@
+ from __future__ import absolute_import
+-from contextlib import contextmanager
+-import zlib
++
+ import io
+ import logging
++import zlib
++from contextlib import contextmanager
++from socket import error as SocketError
+ from socket import timeout as SocketTimeout
+-from socket import error as SocketError
+ 
+ try:
+     import brotli
+ except ImportError:
+     brotli = None
+ 
+ from ._collections import HTTPHeaderDict
++from .connection import BaseSSLError, HTTPException
+ from .exceptions import (
+     BodyNotHttplibCompatible,
++    DecodeError,
++    HTTPError,
++    IncompleteRead,
++    InvalidChunkLength,
++    InvalidHeader,
+     ProtocolError,
+-    DecodeError,
+     ReadTimeoutError,
+     ResponseNotChunked,
+-    IncompleteRead,
+-    InvalidHeader,
+-    HTTPError,
++    SSLError,
+ )
+-from .packages.six import string_types as basestring, PY3
+-from .packages.six.moves import http_client as httplib
+-from .connection import HTTPException, BaseSSLError
++from .packages import six
+ from .util.response import is_fp_closed, is_response_to_head
+ 
+ log = logging.getLogger(__name__)
+ 
+ 
+ class DeflateDecoder(object):
+     def __init__(self):
+         self._first_try = True
+@@ -102,21 +104,20 @@ class GzipDecoder(object):
+ if brotli is not None:
+ 
+     class BrotliDecoder(object):
+         # Supports both 'brotlipy' and 'Brotli' packages
+         # since they share an import name. The top branches
+         # are for 'brotlipy' and bottom branches for 'Brotli'
+         def __init__(self):
+             self._obj = brotli.Decompressor()
+-
+-        def decompress(self, data):
+             if hasattr(self._obj, "decompress"):
+-                return self._obj.decompress(data)
+-            return self._obj.process(data)
++                self.decompress = self._obj.decompress
++            else:
++                self.decompress = self._obj.process
+ 
+         def flush(self):
+             if hasattr(self._obj, "flush"):
+                 return self._obj.flush()
+             return b""
+ 
+ 
+ class MultiDecoder(object):
+@@ -152,33 +153,33 @@ def _get_decoder(mode):
+ 
+     return DeflateDecoder()
+ 
+ 
+ class HTTPResponse(io.IOBase):
+     """
+     HTTP Response container.
+ 
+-    Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
++    Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is
+     loaded and decoded on-demand when the ``data`` property is accessed.  This
+     class is also compatible with the Python standard library's :mod:`io`
+     module, and can hence be treated as a readable object in the context of that
+     framework.
+ 
+-    Extra parameters for behaviour not present in httplib.HTTPResponse:
++    Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`:
+ 
+     :param preload_content:
+         If True, the response's body will be preloaded during construction.
+ 
+     :param decode_content:
+         If True, will attempt to decode the body based on the
+         'content-encoding' header.
+ 
+     :param original_response:
+-        When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
++        When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse`
+         object, it's convenient to include the original for debug purposes. It's
+         otherwise unused.
+ 
+     :param retries:
+         The retries contains the last :class:`~urllib3.util.retry.Retry` that
+         was used during the request.
+ 
+     :param enforce_content_length:
+@@ -228,17 +229,17 @@ class HTTPResponse(io.IOBase):
+         self._decoder = None
+         self._body = None
+         self._fp = None
+         self._original_response = original_response
+         self._fp_bytes_read = 0
+         self.msg = msg
+         self._request_url = request_url
+ 
+-        if body and isinstance(body, (basestring, bytes)):
++        if body and isinstance(body, (six.string_types, bytes)):
+             self._body = body
+ 
+         self._pool = pool
+         self._connection = connection
+ 
+         if hasattr(body, "read"):
+             self._fp = body
+ 
+@@ -286,35 +287,35 @@ class HTTPResponse(io.IOBase):
+         """
+         try:
+             self.read()
+         except (HTTPError, SocketError, BaseSSLError, HTTPException):
+             pass
+ 
+     @property
+     def data(self):
+-        # For backwords-compat with earlier urllib3 0.4 and earlier.
++        # For backwards-compat with earlier urllib3 0.4 and earlier.
+         if self._body:
+             return self._body
+ 
+         if self._fp:
+             return self.read(cache_content=True)
+ 
+     @property
+     def connection(self):
+         return self._connection
+ 
+     def isclosed(self):
+         return is_fp_closed(self._fp)
+ 
+     def tell(self):
+         """
+         Obtain the number of bytes pulled over the wire so far. May differ from
+-        the amount of content returned by :meth:``HTTPResponse.read`` if bytes
+-        are encoded on the wire (e.g, compressed).
++        the amount of content returned by :meth:``urllib3.response.HTTPResponse.read``
++        if bytes are encoded on the wire (e.g, compressed).
+         """
+         return self._fp_bytes_read
+ 
+     def _init_length(self, request_method):
+         """
+         Set initial length value for Response content if available.
+         """
+         length = self.headers.get("content-length")
+@@ -438,20 +439,19 @@ class HTTPResponse(io.IOBase):
+ 
+             except SocketTimeout:
+                 # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but
+                 # there is yet no clean way to get at it from this context.
+                 raise ReadTimeoutError(self._pool, None, "Read timed out.")
+ 
+             except BaseSSLError as e:
+                 # FIXME: Is there a better way to differentiate between SSLErrors?
+-                if "read operation timed out" not in str(e):  # Defensive:
+-                    # This shouldn't happen but just in case we're missing an edge
+-                    # case, let's avoid swallowing SSL errors.
+-                    raise
++                if "read operation timed out" not in str(e):
++                    # SSL errors related to framing/MAC get wrapped and reraised here
++                    raise SSLError(e)
+ 
+                 raise ReadTimeoutError(self._pool, None, "Read timed out.")
+ 
+             except (HTTPException, SocketError) as e:
+                 # This includes IncompleteRead.
+                 raise ProtocolError("Connection broken: %r" % e, e)
+ 
+             # If no exception is thrown, we should avoid cleaning up
+@@ -475,17 +475,17 @@ class HTTPResponse(io.IOBase):
+ 
+             # If we hold the original response but it's closed now, we should
+             # return the connection back to the pool.
+             if self._original_response and self._original_response.isclosed():
+                 self.release_conn()
+ 
+     def read(self, amt=None, decode_content=None, cache_content=False):
+         """
+-        Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
++        Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
+         parameters: ``decode_content`` and ``cache_content``.
+ 
+         :param amt:
+             How much of the content to read. If specified, caching is skipped
+             because it doesn't make sense to cache partial content as the full
+             response.
+ 
+         :param decode_content:
+@@ -576,46 +576,46 @@ class HTTPResponse(io.IOBase):
+                 data = self.read(amt=amt, decode_content=decode_content)
+ 
+                 if data:
+                     yield data
+ 
+     @classmethod
+     def from_httplib(ResponseCls, r, **response_kw):
+         """
+-        Given an :class:`httplib.HTTPResponse` instance ``r``, return a
++        Given an :class:`http.client.HTTPResponse` instance ``r``, return a
+         corresponding :class:`urllib3.response.HTTPResponse` object.
+ 
+         Remaining parameters are passed to the HTTPResponse constructor, along
+         with ``original_response=r``.
+         """
+         headers = r.msg
+ 
+         if not isinstance(headers, HTTPHeaderDict):
+-            if PY3:
+-                headers = HTTPHeaderDict(headers.items())
+-            else:
++            if six.PY2:
+                 # Python 2.7
+                 headers = HTTPHeaderDict.from_httplib(headers)
++            else:
++                headers = HTTPHeaderDict(headers.items())
+ 
+         # HTTPResponse objects in Python 3 don't have a .strict attribute
+         strict = getattr(r, "strict", 0)
+         resp = ResponseCls(
+             body=r,
+             headers=headers,
+             status=r.status,
+             version=r.version,
+             reason=r.reason,
+             strict=strict,
+             original_response=r,
+             **response_kw
+         )
+         return resp
+ 
+-    # Backwards-compatibility methods for httplib.HTTPResponse
++    # Backwards-compatibility methods for http.client.HTTPResponse
+     def getheaders(self):
+         return self.headers
+ 
+     def getheader(self, name, default=None):
+         return self.headers.get(name, default)
+ 
+     # Backwards compatibility for http.cookiejar
+     def info(self):
+@@ -675,35 +675,35 @@ class HTTPResponse(io.IOBase):
+             return 0
+         else:
+             b[: len(temp)] = temp
+             return len(temp)
+ 
+     def supports_chunked_reads(self):
+         """
+         Checks if the underlying file-like object looks like a
+-        httplib.HTTPResponse object. We do this by testing for the fp
+-        attribute. If it is present we assume it returns raw chunks as
++        :class:`http.client.HTTPResponse` object. We do this by testing for
++        the fp attribute. If it is present we assume it returns raw chunks as
+         processed by read_chunked().
+         """
+         return hasattr(self._fp, "fp")
+ 
+     def _update_chunk_length(self):
+         # First, we'll figure out length of a chunk and then
+         # we'll try to read it from socket.
+         if self.chunk_left is not None:
+             return
+         line = self._fp.fp.readline()
+         line = line.split(b";", 1)[0]
+         try:
+             self.chunk_left = int(line, 16)
+         except ValueError:
+             # Invalid chunked protocol response, abort.
+             self.close()
+-            raise httplib.IncompleteRead(line)
++            raise InvalidChunkLength(self, line)
+ 
+     def _handle_chunk(self, amt):
+         returned_chunk = None
+         if amt is None:
+             chunk = self._fp._safe_read(self.chunk_left)
+             returned_chunk = chunk
+             self._fp._safe_read(2)  # Toss the CRLF at the end of the chunk.
+             self.chunk_left = None
+@@ -740,17 +740,17 @@ class HTTPResponse(io.IOBase):
+         # FIXME: Rewrite this method and make it a class with a better structured logic.
+         if not self.chunked:
+             raise ResponseNotChunked(
+                 "Response is not chunked. "
+                 "Header 'transfer-encoding: chunked' is missing."
+             )
+         if not self.supports_chunked_reads():
+             raise BodyNotHttplibCompatible(
+-                "Body should be httplib.HTTPResponse like. "
++                "Body should be http.client.HTTPResponse like. "
+                 "It should have have an fp attribute which returns raw chunks."
+             )
+ 
+         with self._error_catcher():
+             # Don't bother reading the body of a HEAD request.
+             if self._original_response and is_response_to_head(self._original_response):
+                 self._original_response.close()
+                 return
+diff --git a/third_party/python/urllib3/urllib3/util/__init__.py b/third_party/python/urllib3/urllib3/util/__init__.py
+--- a/third_party/python/urllib3/urllib3/util/__init__.py
++++ b/third_party/python/urllib3/urllib3/util/__init__.py
+@@ -1,46 +1,49 @@
+ from __future__ import absolute_import
+ 
+ # For backwards compatibility, provide imports that used to be here.
+ from .connection import is_connection_dropped
+-from .request import make_headers
++from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers
+ from .response import is_fp_closed
++from .retry import Retry
+ from .ssl_ import (
+-    SSLContext,
++    ALPN_PROTOCOLS,
+     HAS_SNI,
+     IS_PYOPENSSL,
+     IS_SECURETRANSPORT,
++    PROTOCOL_TLS,
++    SSLContext,
+     assert_fingerprint,
+     resolve_cert_reqs,
+     resolve_ssl_version,
+     ssl_wrap_socket,
+-    PROTOCOL_TLS,
+ )
+-from .timeout import current_time, Timeout
+-
+-from .retry import Retry
+-from .url import get_host, parse_url, split_first, Url
++from .timeout import Timeout, current_time
++from .url import Url, get_host, parse_url, split_first
+ from .wait import wait_for_read, wait_for_write
+ 
+ __all__ = (
+     "HAS_SNI",
+     "IS_PYOPENSSL",
+     "IS_SECURETRANSPORT",
+     "SSLContext",
+     "PROTOCOL_TLS",
++    "ALPN_PROTOCOLS",
+     "Retry",
+     "Timeout",
+     "Url",
+     "assert_fingerprint",
+     "current_time",
+     "is_connection_dropped",
+     "is_fp_closed",
+     "get_host",
+     "parse_url",
+     "make_headers",
+     "resolve_cert_reqs",
+     "resolve_ssl_version",
+     "split_first",
+     "ssl_wrap_socket",
+     "wait_for_read",
+     "wait_for_write",
++    "SKIP_HEADER",
++    "SKIPPABLE_HEADERS",
+ )
+diff --git a/third_party/python/urllib3/urllib3/util/connection.py b/third_party/python/urllib3/urllib3/util/connection.py
+--- a/third_party/python/urllib3/urllib3/util/connection.py
++++ b/third_party/python/urllib3/urllib3/util/connection.py
+@@ -1,20 +1,25 @@
+ from __future__ import absolute_import
++
+ import socket
++
++from urllib3.exceptions import LocationParseError
++
++from ..contrib import _appengine_environ
++from ..packages import six
+ from .wait import NoWayToWaitForSocketError, wait_for_read
+-from ..contrib import _appengine_environ
+ 
+ 
+ def is_connection_dropped(conn):  # Platform-specific
+     """
+     Returns True if the connection is dropped and should be closed.
+ 
+     :param conn:
+-        :class:`httplib.HTTPConnection` object.
++        :class:`http.client.HTTPConnection` object.
+ 
+     Note: For platforms like AppEngine, this will always return ``False`` to
+     let the platform handle connection recycling transparently for us.
+     """
+     sock = getattr(conn, "sock", False)
+     if sock is False:  # Platform-specific: AppEngine
+         return False
+     if sock is None:  # Connection already closed (such as by httplib).
+@@ -37,32 +42,39 @@ def create_connection(
+     socket_options=None,
+ ):
+     """Connect to *address* and return the socket object.
+ 
+     Convenience function.  Connect to *address* (a 2-tuple ``(host,
+     port)``) and return the socket object.  Passing the optional
+     *timeout* parameter will set the timeout on the socket instance
+     before attempting to connect.  If no *timeout* is supplied, the
+-    global default timeout setting returned by :func:`getdefaulttimeout`
++    global default timeout setting returned by :func:`socket.getdefaulttimeout`
+     is used.  If *source_address* is set it must be a tuple of (host, port)
+     for the socket to bind as a source address before making the connection.
+     An host of '' or port 0 tells the OS to use the default.
+     """
+ 
+     host, port = address
+     if host.startswith("["):
+         host = host.strip("[]")
+     err = None
+ 
+     # Using the value from allowed_gai_family() in the context of getaddrinfo lets
+     # us select whether to work with IPv4 DNS records, IPv6 records, or both.
+     # The original create_connection function always returns all records.
+     family = allowed_gai_family()
+ 
++    try:
++        host.encode("idna")
++    except UnicodeError:
++        return six.raise_from(
++            LocationParseError(u"'%s', label empty or too long" % host), None
++        )
++
+     for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
+         af, socktype, proto, canonname, sa = res
+         sock = None
+         try:
+             sock = socket.socket(af, socktype, proto)
+ 
+             # If provided, set socket level options before connecting.
+             _set_socket_options(sock, socket_options)
+diff --git a/third_party/python/urllib3/urllib3/util/proxy.py b/third_party/python/urllib3/urllib3/util/proxy.py
+new file mode 100644
+--- /dev/null
++++ b/third_party/python/urllib3/urllib3/util/proxy.py
+@@ -0,0 +1,56 @@
++from .ssl_ import create_urllib3_context, resolve_cert_reqs, resolve_ssl_version
++
++
++def connection_requires_http_tunnel(
++    proxy_url=None, proxy_config=None, destination_scheme=None
++):
++    """
++    Returns True if the connection requires an HTTP CONNECT through the proxy.
++
++    :param URL proxy_url:
++        URL of the proxy.
++    :param ProxyConfig proxy_config:
++        Proxy configuration from poolmanager.py
++    :param str destination_scheme:
++        The scheme of the destination. (i.e https, http, etc)
++    """
++    # If we're not using a proxy, no way to use a tunnel.
++    if proxy_url is None:
++        return False
++
++    # HTTP destinations never require tunneling, we always forward.
++    if destination_scheme == "http":
++        return False
++
++    # Support for forwarding with HTTPS proxies and HTTPS destinations.
++    if (
++        proxy_url.scheme == "https"
++        and proxy_config
++        and proxy_config.use_forwarding_for_https
++    ):
++        return False
++
++    # Otherwise always use a tunnel.
++    return True
++
++
++def create_proxy_ssl_context(
++    ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None
++):
++    """
++    Generates a default proxy ssl context if one hasn't been provided by the
++    user.
++    """
++    ssl_context = create_urllib3_context(
++        ssl_version=resolve_ssl_version(ssl_version),
++        cert_reqs=resolve_cert_reqs(cert_reqs),
++    )
++    if (
++        not ca_certs
++        and not ca_cert_dir
++        and not ca_cert_data
++        and hasattr(ssl_context, "load_default_certs")
++    ):
++        ssl_context.load_default_certs()
++
++    return ssl_context
+diff --git a/third_party/python/urllib3/urllib3/util/queue.py b/third_party/python/urllib3/urllib3/util/queue.py
+--- a/third_party/python/urllib3/urllib3/util/queue.py
++++ b/third_party/python/urllib3/urllib3/util/queue.py
+@@ -1,9 +1,10 @@
+ import collections
++
+ from ..packages import six
+ from ..packages.six.moves import queue
+ 
+ if six.PY2:
+     # Queue is imported for side effects on MS Windows. See issue #229.
+     import Queue as _unused_module_Queue  # noqa: F401
+ 
+ 
+diff --git a/third_party/python/urllib3/urllib3/util/request.py b/third_party/python/urllib3/urllib3/util/request.py
+--- a/third_party/python/urllib3/urllib3/util/request.py
++++ b/third_party/python/urllib3/urllib3/util/request.py
+@@ -1,13 +1,21 @@
+ from __future__ import absolute_import
++
+ from base64 import b64encode
+ 
++from ..exceptions import UnrewindableBodyError
+ from ..packages.six import b, integer_types
+-from ..exceptions import UnrewindableBodyError
++
++# Pass as a value within ``headers`` to skip
++# emitting some HTTP headers that are added automatically.
++# The only headers that are supported are ``Accept-Encoding``,
++# ``Host``, and ``User-Agent``.
++SKIP_HEADER = "@@@SKIP_HEADER@@@"
++SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"])
+ 
+ ACCEPT_ENCODING = "gzip,deflate"
+ try:
+     import brotli as _unused_module_brotli  # noqa: F401
+ except ImportError:
+     pass
+ else:
+     ACCEPT_ENCODING += ",br"
+diff --git a/third_party/python/urllib3/urllib3/util/response.py b/third_party/python/urllib3/urllib3/util/response.py
+--- a/third_party/python/urllib3/urllib3/util/response.py
++++ b/third_party/python/urllib3/urllib3/util/response.py
+@@ -1,12 +1,14 @@
+ from __future__ import absolute_import
+-from ..packages.six.moves import http_client as httplib
++
++from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect
+ 
+ from ..exceptions import HeaderParsingError
++from ..packages.six.moves import http_client as httplib
+ 
+ 
+ def is_fp_closed(obj):
+     """
+     Checks whether a given file-like object is closed.
+ 
+     :param obj:
+         The file-like object to check.
+@@ -37,18 +39,17 @@ def is_fp_closed(obj):
+ 
+ def assert_header_parsing(headers):
+     """
+     Asserts whether all headers have been successfully parsed.
+     Extracts encountered errors from the result of parsing headers.
+ 
+     Only works on Python 3.
+ 
+-    :param headers: Headers to verify.
+-    :type headers: `httplib.HTTPMessage`.
++    :param http.client.HTTPMessage headers: Headers to verify.
+ 
+     :raises urllib3.exceptions.HeaderParsingError:
+         If parsing errors are found.
+     """
+ 
+     # This will fail silently if we pass in the wrong kind of parameter.
+     # To make debugging easier add an explicit check.
+     if not isinstance(headers, httplib.HTTPMessage):
+@@ -61,26 +62,46 @@ def assert_header_parsing(headers):
+     if get_payload:
+         # get_payload is actually email.message.Message.get_payload;
+         # we're only interested in the result if it's not a multipart message
+         if not headers.is_multipart():
+             payload = get_payload()
+ 
+             if isinstance(payload, (bytes, str)):
+                 unparsed_data = payload
++    if defects:
++        # httplib is assuming a response body is available
++        # when parsing headers even when httplib only sends
++        # header data to parse_headers() This results in
++        # defects on multipart responses in particular.
++        # See: https://github.com/urllib3/urllib3/issues/800
++
++        # So we ignore the following defects:
++        # - StartBoundaryNotFoundDefect:
++        #     The claimed start boundary was never found.
++        # - MultipartInvariantViolationDefect:
++        #     A message claimed to be a multipart but no subparts were found.
++        defects = [
++            defect
++            for defect in defects
++            if not isinstance(
++                defect, (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect)
++            )
++        ]
+ 
+     if defects or unparsed_data:
+         raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data)
+ 
+ 
+ def is_response_to_head(response):
+     """
+     Checks whether the request of a response has been a HEAD-request.
+     Handles the quirks of AppEngine.
+ 
+-    :param conn:
+-    :type conn: :class:`httplib.HTTPResponse`
++    :param http.client.HTTPResponse response:
++        Response to check if the originating request
++        used 'HEAD' as a method.
+     """
+     # FIXME: Can we do this somehow without accessing private httplib _method?
+     method = response._method
+     if isinstance(method, int):  # Platform-specific: Appengine
+         return method == 3
+     return method.upper() == "HEAD"
+diff --git a/third_party/python/urllib3/urllib3/util/retry.py b/third_party/python/urllib3/urllib3/util/retry.py
+--- a/third_party/python/urllib3/urllib3/util/retry.py
++++ b/third_party/python/urllib3/urllib3/util/retry.py
+@@ -1,39 +1,83 @@
+ from __future__ import absolute_import
++
++import email
++import logging
++import re
+ import time
+-import logging
++import warnings
+ from collections import namedtuple
+ from itertools import takewhile
+-import email
+-import re
+ 
+ from ..exceptions import (
+     ConnectTimeoutError,
++    InvalidHeader,
+     MaxRetryError,
+     ProtocolError,
++    ProxyError,
+     ReadTimeoutError,
+     ResponseError,
+-    InvalidHeader,
+-    ProxyError,
+ )
+ from ..packages import six
+ 
+-
+ log = logging.getLogger(__name__)
+ 
+ 
+ # Data structure for representing the metadata of requests that result in a retry.
+ RequestHistory = namedtuple(
+     "RequestHistory", ["method", "url", "error", "status", "redirect_location"]
+ )
+ 
+ 
++# TODO: In v2 we can remove this sentinel and metaclass with deprecated options.
++_Default = object()
++
++
++class _RetryMeta(type):
++    @property
++    def DEFAULT_METHOD_WHITELIST(cls):
++        warnings.warn(
++            "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
++            "will be removed in v2.0. Use 'Retry.DEFAULT_METHODS_ALLOWED' instead",
++            DeprecationWarning,
++        )
++        return cls.DEFAULT_ALLOWED_METHODS
++
++    @DEFAULT_METHOD_WHITELIST.setter
++    def DEFAULT_METHOD_WHITELIST(cls, value):
++        warnings.warn(
++            "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
++            "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead",
++            DeprecationWarning,
++        )
++        cls.DEFAULT_ALLOWED_METHODS = value
++
++    @property
++    def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls):
++        warnings.warn(
++            "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
++            "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
++            DeprecationWarning,
++        )
++        return cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
++
++    @DEFAULT_REDIRECT_HEADERS_BLACKLIST.setter
++    def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls, value):
++        warnings.warn(
++            "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
++            "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
++            DeprecationWarning,
++        )
++        cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = value
++
++
++@six.add_metaclass(_RetryMeta)
+ class Retry(object):
+-    """ Retry configuration.
++    """Retry configuration.
+ 
+     Each retry attempt will create a new Retry object with updated values, so
+     they can be safely reused.
+ 
+     Retries can be defined as a default for a pool::
+ 
+         retries = Retry(connect=5, read=2, redirect=5)
+         http = PoolManager(retries=retries)
+@@ -49,18 +93,17 @@ class Retry(object):
+ 
+     Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
+     retries are disabled, in which case the causing exception will be raised.
+ 
+     :param int total:
+         Total number of retries to allow. Takes precedence over other counts.
+ 
+         Set to ``None`` to remove this constraint and fall back on other
+-        counts. It's a good idea to set this to some sensibly-high value to
+-        account for unexpected edge cases and avoid infinite retry loops.
++        counts.
+ 
+         Set to ``0`` to fail on the first retry.
+ 
+         Set to ``False`` to disable and imply ``raise_on_redirect=False``.
+ 
+     :param int connect:
+         How many connection-related errors to retry on.
+ 
+@@ -91,28 +134,45 @@ class Retry(object):
+     :param int status:
+         How many times to retry on bad status codes.
+ 
+         These are retries made on responses, where status code matches
+         ``status_forcelist``.
+ 
+         Set to ``0`` to fail on the first retry of this type.
+ 
+-    :param iterable method_whitelist:
++    :param int other:
++        How many times to retry on other errors.
++
++        Other errors are errors that are not connect, read, redirect or status errors.
++        These errors might be raised after the request was sent to the server, so the
++        request might have side-effects.
++
++        Set to ``0`` to fail on the first retry of this type.
++
++        If ``total`` is not set, it's a good idea to set this to 0 to account
++        for unexpected edge cases and avoid infinite retry loops.
++
++    :param iterable allowed_methods:
+         Set of uppercased HTTP method verbs that we should retry on.
+ 
+         By default, we only retry on methods which are considered to be
+         idempotent (multiple requests with the same parameters end with the
+-        same state). See :attr:`Retry.DEFAULT_METHOD_WHITELIST`.
++        same state). See :attr:`Retry.DEFAULT_ALLOWED_METHODS`.
+ 
+         Set to a ``False`` value to retry on any verb.
+ 
++        .. warning::
++
++            Previously this parameter was named ``method_whitelist``, that
++            usage is deprecated in v1.26.0 and will be removed in v2.0.
++
+     :param iterable status_forcelist:
+         A set of integer HTTP status codes that we should force a retry on.
+-        A retry is initiated if the request method is in ``method_whitelist``
++        A retry is initiated if the request method is in ``allowed_methods``
+         and the response status code is in ``status_forcelist``.
+ 
+         By default, this is disabled with ``None``.
+ 
+     :param float backoff_factor:
+         A backoff factor to apply between attempts after the second try
+         (most errors are resolved immediately by a second try without a
+         delay). urllib3 will sleep for::
+@@ -143,81 +203,123 @@ class Retry(object):
+         :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not.
+ 
+     :param iterable remove_headers_on_redirect:
+         Sequence of headers to remove from the request when a response
+         indicating a redirect is returned before firing off the redirected
+         request.
+     """
+ 
+-    DEFAULT_METHOD_WHITELIST = frozenset(
++    #: Default methods to be used for ``allowed_methods``
++    DEFAULT_ALLOWED_METHODS = frozenset(
+         ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"]
+     )
+ 
++    #: Default status codes to be used for ``status_forcelist``
+     RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])
+ 
+-    DEFAULT_REDIRECT_HEADERS_BLACKLIST = frozenset(["Authorization"])
++    #: Default headers to be used for ``remove_headers_on_redirect``
++    DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"])
+ 
+     #: Maximum backoff time.
+     BACKOFF_MAX = 120
+ 
+     def __init__(
+         self,
+         total=10,
+         connect=None,
+         read=None,
+         redirect=None,
+         status=None,
+-        method_whitelist=DEFAULT_METHOD_WHITELIST,
++        other=None,
++        allowed_methods=_Default,
+         status_forcelist=None,
+         backoff_factor=0,
+         raise_on_redirect=True,
+         raise_on_status=True,
+         history=None,
+         respect_retry_after_header=True,
+-        remove_headers_on_redirect=DEFAULT_REDIRECT_HEADERS_BLACKLIST,
++        remove_headers_on_redirect=_Default,
++        # TODO: Deprecated, remove in v2.0
++        method_whitelist=_Default,
+     ):
+ 
++        if method_whitelist is not _Default:
++            if allowed_methods is not _Default:
++                raise ValueError(
++                    "Using both 'allowed_methods' and "
++                    "'method_whitelist' together is not allowed. "
++                    "Instead only use 'allowed_methods'"
++                )
++            warnings.warn(
++                "Using 'method_whitelist' with Retry is deprecated and "
++                "will be removed in v2.0. Use 'allowed_methods' instead",
++                DeprecationWarning,
++            )
++            allowed_methods = method_whitelist
++        if allowed_methods is _Default:
++            allowed_methods = self.DEFAULT_ALLOWED_METHODS
++        if remove_headers_on_redirect is _Default:
++            remove_headers_on_redirect = self.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
++
+         self.total = total
+         self.connect = connect
+         self.read = read
+         self.status = status
++        self.other = other
+ 
+         if redirect is False or total is False:
+             redirect = 0
+             raise_on_redirect = False
+ 
+         self.redirect = redirect
+         self.status_forcelist = status_forcelist or set()
+-        self.method_whitelist = method_whitelist
++        self.allowed_methods = allowed_methods
+         self.backoff_factor = backoff_factor
+         self.raise_on_redirect = raise_on_redirect
+         self.raise_on_status = raise_on_status
+         self.history = history or tuple()
+         self.respect_retry_after_header = respect_retry_after_header
+         self.remove_headers_on_redirect = frozenset(
+             [h.lower() for h in remove_headers_on_redirect]
+         )
+ 
+     def new(self, **kw):
+         params = dict(
+             total=self.total,
+             connect=self.connect,
+             read=self.read,
+             redirect=self.redirect,
+             status=self.status,
+-            method_whitelist=self.method_whitelist,
++            other=self.other,
+             status_forcelist=self.status_forcelist,
+             backoff_factor=self.backoff_factor,
+             raise_on_redirect=self.raise_on_redirect,
+             raise_on_status=self.raise_on_status,
+             history=self.history,
+             remove_headers_on_redirect=self.remove_headers_on_redirect,
+             respect_retry_after_header=self.respect_retry_after_header,
+         )
++
++        # TODO: If already given in **kw we use what's given to us
++        # If not given we need to figure out what to pass. We decide
++        # based on whether our class has the 'method_whitelist' property
++        # and if so we pass the deprecated 'method_whitelist' otherwise
++        # we use 'allowed_methods'. Remove in v2.0
++        if "method_whitelist" not in kw and "allowed_methods" not in kw:
++            if "method_whitelist" in self.__dict__:
++                warnings.warn(
++                    "Using 'method_whitelist' with Retry is deprecated and "
++                    "will be removed in v2.0. Use 'allowed_methods' instead",
++                    DeprecationWarning,
++                )
++                params["method_whitelist"] = self.allowed_methods
++            else:
++                params["allowed_methods"] = self.allowed_methods
++
+         params.update(kw)
+         return type(self)(**params)
+ 
+     @classmethod
+     def from_int(cls, retries, redirect=True, default=None):
+         """ Backwards-compatibility for the old retries format."""
+         if retries is None:
+             retries = default if default is not None else cls.DEFAULT
+@@ -226,17 +328,17 @@ class Retry(object):
+             return retries
+ 
+         redirect = bool(redirect) and None
+         new_retries = cls(retries, redirect=redirect)
+         log.debug("Converted retries value: %r -> %r", retries, new_retries)
+         return new_retries
+ 
+     def get_backoff_time(self):
+-        """ Formula for computing the current backoff
++        """Formula for computing the current backoff
+ 
+         :rtype: float
+         """
+         # We want to consider only the last consecutive errors sequence (Ignore redirects).
+         consecutive_errors_len = len(
+             list(
+                 takewhile(lambda x: x.redirect_location is None, reversed(self.history))
+             )
+@@ -247,20 +349,27 @@ class Retry(object):
+         backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1))
+         return min(self.BACKOFF_MAX, backoff_value)
+ 
+     def parse_retry_after(self, retry_after):
+         # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4
+         if re.match(r"^\s*[0-9]+\s*$", retry_after):
+             seconds = int(retry_after)
+         else:
+-            retry_date_tuple = email.utils.parsedate(retry_after)
++            retry_date_tuple = email.utils.parsedate_tz(retry_after)
+             if retry_date_tuple is None:
+                 raise InvalidHeader("Invalid Retry-After header: %s" % retry_after)
+-            retry_date = time.mktime(retry_date_tuple)
++            if retry_date_tuple[9] is None:  # Python 2
++                # Assume UTC if no timezone was specified
++                # On Python2.7, parsedate_tz returns None for a timezone offset
++                # instead of 0 if no timezone is given, where mktime_tz treats
++                # a None timezone offset as local time.
++                retry_date_tuple = retry_date_tuple[:9] + (0,) + retry_date_tuple[10:]
++
++            retry_date = email.utils.mktime_tz(retry_date_tuple)
+             seconds = retry_date - time.time()
+ 
+         if seconds < 0:
+             seconds = 0
+ 
+         return seconds
+ 
+     def get_retry_after(self, response):
+@@ -283,56 +392,67 @@ class Retry(object):
+ 
+     def _sleep_backoff(self):
+         backoff = self.get_backoff_time()
+         if backoff <= 0:
+             return
+         time.sleep(backoff)
+ 
+     def sleep(self, response=None):
+-        """ Sleep between retry attempts.
++        """Sleep between retry attempts.
+ 
+         This method will respect a server's ``Retry-After`` response header
+         and sleep the duration of the time requested. If that is not present, it
+         will use an exponential backoff. By default, the backoff factor is 0 and
+         this method will return immediately.
+         """
+ 
+         if self.respect_retry_after_header and response:
+             slept = self.sleep_for_retry(response)
+             if slept:
+                 return
+ 
+         self._sleep_backoff()
+ 
+     def _is_connection_error(self, err):
+-        """ Errors when we're fairly sure that the server did not receive the
++        """Errors when we're fairly sure that the server did not receive the
+         request, so it should be safe to retry.
+         """
+         if isinstance(err, ProxyError):
+             err = err.original_error
+         return isinstance(err, ConnectTimeoutError)
+ 
+     def _is_read_error(self, err):
+-        """ Errors that occur after the request has been started, so we should
++        """Errors that occur after the request has been started, so we should
+         assume that the server began processing it.
+         """
+         return isinstance(err, (ReadTimeoutError, ProtocolError))
+ 
+     def _is_method_retryable(self, method):
+-        """ Checks if a given HTTP method should be retried upon, depending if
+-        it is included on the method whitelist.
++        """Checks if a given HTTP method should be retried upon, depending if
++        it is included in the allowed_methods
+         """
+-        if self.method_whitelist and method.upper() not in self.method_whitelist:
++        # TODO: For now favor if the Retry implementation sets its own method_whitelist
++        # property outside of our constructor to avoid breaking custom implementations.
++        if "method_whitelist" in self.__dict__:
++            warnings.warn(
++                "Using 'method_whitelist' with Retry is deprecated and "
++                "will be removed in v2.0. Use 'allowed_methods' instead",
++                DeprecationWarning,
++            )
++            allowed_methods = self.method_whitelist
++        else:
++            allowed_methods = self.allowed_methods
++
++        if allowed_methods and method.upper() not in allowed_methods:
+             return False
+-
+         return True
+ 
+     def is_retry(self, method, status_code, has_retry_after=False):
+-        """ Is this method/status code retryable? (Based on whitelists and control
++        """Is this method/status code retryable? (Based on allowlists and control
+         variables such as the number of total retries to allow, whether to
+         respect the Retry-After header, whether this header is present, and
+         whether the returned status code is on the list of status codes to
+         be retried upon on the presence of the aforementioned header)
+         """
+         if not self._is_method_retryable(method):
+             return False
+ 
+@@ -343,33 +463,40 @@ class Retry(object):
+             self.total
+             and self.respect_retry_after_header
+             and has_retry_after
+             and (status_code in self.RETRY_AFTER_STATUS_CODES)
+         )
+ 
+     def is_exhausted(self):
+         """ Are we out of retries? """
+-        retry_counts = (self.total, self.connect, self.read, self.redirect, self.status)
++        retry_counts = (
++            self.total,
++            self.connect,
++            self.read,
++            self.redirect,
++            self.status,
++            self.other,
++        )
+         retry_counts = list(filter(None, retry_counts))
+         if not retry_counts:
+             return False
+ 
+         return min(retry_counts) < 0
+ 
+     def increment(
+         self,
+         method=None,
+         url=None,
+         response=None,
+         error=None,
+         _pool=None,
+         _stacktrace=None,
+     ):
+-        """ Return a new Retry object with incremented retry counters.
++        """Return a new Retry object with incremented retry counters.
+ 
+         :param response: A response object, or None, if the server did not
+             return a response.
+         :type response: :class:`~urllib3.response.HTTPResponse`
+         :param Exception error: An error encountered during the request, or
+             None if the response was received successfully.
+ 
+         :return: A new ``Retry`` object.
+@@ -381,16 +508,17 @@ class Retry(object):
+         total = self.total
+         if total is not None:
+             total -= 1
+ 
+         connect = self.connect
+         read = self.read
+         redirect = self.redirect
+         status_count = self.status
++        other = self.other
+         cause = "unknown"
+         status = None
+         redirect_location = None
+ 
+         if error and self._is_connection_error(error):
+             # Connect retry?
+             if connect is False:
+                 raise six.reraise(type(error), error, _stacktrace)
+@@ -399,27 +527,32 @@ class Retry(object):
+ 
+         elif error and self._is_read_error(error):
+             # Read retry?
+             if read is False or not self._is_method_retryable(method):
+                 raise six.reraise(type(error), error, _stacktrace)
+             elif read is not None:
+                 read -= 1
+ 
++        elif error:
++            # Other retry?
++            if other is not None:
++                other -= 1
++
+         elif response and response.get_redirect_location():
+             # Redirect retry?
+             if redirect is not None:
+                 redirect -= 1
+             cause = "too many redirects"
+             redirect_location = response.get_redirect_location()
+             status = response.status
+ 
+         else:
+             # Incrementing because of a server error like a 500 in
+-            # status_forcelist and a the given method is in the whitelist
++            # status_forcelist and the given method is in the allowed_methods
+             cause = ResponseError.GENERIC_ERROR
+             if response and response.status:
+                 if status_count is not None:
+                     status_count -= 1
+                 cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status)
+                 status = response.status
+ 
+         history = self.history + (
+@@ -427,27 +560,42 @@ class Retry(object):
+         )
+ 
+         new_retry = self.new(
+             total=total,
+             connect=connect,
+             read=read,
+             redirect=redirect,
+             status=status_count,
++            other=other,
+             history=history,
+         )
+ 
+         if new_retry.is_exhausted():
+             raise MaxRetryError(_pool, url, error or ResponseError(cause))
+ 
+         log.debug("Incremented Retry for (url='%s'): %r", url, new_retry)
+ 
+         return new_retry
+ 
+     def __repr__(self):
+         return (
+             "{cls.__name__}(total={self.total}, connect={self.connect}, "
+             "read={self.read}, redirect={self.redirect}, status={self.status})"
+         ).format(cls=type(self), self=self)
+ 
++    def __getattr__(self, item):
++        if item == "method_whitelist":
++            # TODO: Remove this deprecated alias in v2.0
++            warnings.warn(
++                "Using 'method_whitelist' with Retry is deprecated and "
++                "will be removed in v2.0. Use 'allowed_methods' instead",
++                DeprecationWarning,
++            )
++            return self.allowed_methods
++        try:
++            return getattr(super(Retry, self), item)
++        except AttributeError:
++            return getattr(Retry, item)
++
+ 
+ # For backwards compatibility (equivalent to pre-v1.9):
+ Retry.DEFAULT = Retry(3)
+diff --git a/third_party/python/urllib3/urllib3/util/ssl_.py b/third_party/python/urllib3/urllib3/util/ssl_.py
+--- a/third_party/python/urllib3/urllib3/util/ssl_.py
++++ b/third_party/python/urllib3/urllib3/util/ssl_.py
+@@ -1,50 +1,58 @@
+ from __future__ import absolute_import
+-import errno
++
++import hmac
++import os
++import sys
+ import warnings
+-import hmac
+-import sys
+-
+ from binascii import hexlify, unhexlify
+ from hashlib import md5, sha1, sha256
+ 
+-from .url import IPV4_RE, BRACELESS_IPV6_ADDRZ_RE
+-from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning
++from ..exceptions import (
++    InsecurePlatformWarning,
++    ProxySchemeUnsupported,
++    SNIMissingWarning,
++    SSLError,
++)
+ from ..packages import six
+-
++from .url import BRACELESS_IPV6_ADDRZ_RE, IPV4_RE
+ 
+ SSLContext = None
++SSLTransport = None
+ HAS_SNI = False
+ IS_PYOPENSSL = False
+ IS_SECURETRANSPORT = False
++ALPN_PROTOCOLS = ["http/1.1"]
+ 
+ # Maps the length of a digest to a possible hash function producing this digest
+ HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256}
+ 
+ 
+ def _const_compare_digest_backport(a, b):
+     """
+     Compare two digests of equal length in constant time.
+ 
+     The digests must be of type str/bytes.
+     Returns True if the digests match, and False otherwise.
+     """
+     result = abs(len(a) - len(b))
+-    for l, r in zip(bytearray(a), bytearray(b)):
+-        result |= l ^ r
++    for left, right in zip(bytearray(a), bytearray(b)):
++        result |= left ^ right
+     return result == 0
+ 
+ 
+ _const_compare_digest = getattr(hmac, "compare_digest", _const_compare_digest_backport)
+ 
+ try:  # Test for SSL features
+     import ssl
+-    from ssl import wrap_socket, CERT_REQUIRED
+     from ssl import HAS_SNI  # Has SNI?
++    from ssl import CERT_REQUIRED, wrap_socket
++
++    from .ssltransport import SSLTransport
+ except ImportError:
+     pass
+ 
+ try:  # Platform-specific: Python 3.6
+     from ssl import PROTOCOL_TLS
+ 
+     PROTOCOL_SSLv23 = PROTOCOL_TLS
+ except ImportError:
+@@ -52,22 +60,28 @@ except ImportError:
+         from ssl import PROTOCOL_SSLv23 as PROTOCOL_TLS
+ 
+         PROTOCOL_SSLv23 = PROTOCOL_TLS
+     except ImportError:
+         PROTOCOL_SSLv23 = PROTOCOL_TLS = 2
+ 
+ 
+ try:
+-    from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION
++    from ssl import OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3
+ except ImportError:
+     OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
+     OP_NO_COMPRESSION = 0x20000
+ 
+ 
++try:  # OP_NO_TICKET was added in Python 3.6
++    from ssl import OP_NO_TICKET
++except ImportError:
++    OP_NO_TICKET = 0x4000
++
++
+ # A secure default.
+ # Sources for more information on TLS ciphers:
+ #
+ # - https://wiki.mozilla.org/Security/Server_Side_TLS
+ # - https://www.ssllabs.com/projects/best-practices/index.html
+ # - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
+ #
+ # The general intent is:
+@@ -244,17 +258,17 @@ def create_urllib3_context(
+         The desired protocol version to use. This will default to
+         PROTOCOL_SSLv23 which will negotiate the highest protocol that both
+         the server and your installation of OpenSSL support.
+     :param cert_reqs:
+         Whether to require the certificate verification. This defaults to
+         ``ssl.CERT_REQUIRED``.
+     :param options:
+         Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
+-        ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``.
++        ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``.
+     :param ciphers:
+         Which cipher suites to allow the server to select.
+     :returns:
+         Constructed SSLContext object with specified options
+     :rtype: SSLContext
+     """
+     context = SSLContext(ssl_version or PROTOCOL_TLS)
+ 
+@@ -267,16 +281,21 @@ def create_urllib3_context(
+         options = 0
+         # SSLv2 is easily broken and is considered harmful and dangerous
+         options |= OP_NO_SSLv2
+         # SSLv3 has several problems and is now dangerous
+         options |= OP_NO_SSLv3
+         # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
+         # (issue #309)
+         options |= OP_NO_COMPRESSION
++        # TLSv1.2 only. Unless set explicitly, do not request tickets.
++        # This may save some bandwidth on wire, and although the ticket is encrypted,
++        # there is a risk associated with it being on wire,
++        # if the server is not rotating its ticketing keys properly.
++        options |= OP_NO_TICKET
+ 
+     context.options |= options
+ 
+     # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is
+     # necessary for conditional client cert authentication with TLS 1.3.
+     # The attribute is None for OpenSSL <= 1.1.0 or does not exist in older
+     # versions of Python.  We only enable on Python 3.7.4+ or if certificate
+     # verification is enabled to work around Python issue #37428
+@@ -288,32 +307,41 @@ def create_urllib3_context(
+ 
+     context.verify_mode = cert_reqs
+     if (
+         getattr(context, "check_hostname", None) is not None
+     ):  # Platform-specific: Python 3.2
+         # We do our own verification, including fingerprints and alternative
+         # hostnames. So disable it here
+         context.check_hostname = False
++
++    # Enable logging of TLS session keys via defacto standard environment variable
++    # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values.
++    if hasattr(context, "keylog_filename"):
++        sslkeylogfile = os.environ.get("SSLKEYLOGFILE")
++        if sslkeylogfile:
++            context.keylog_filename = sslkeylogfile
++
+     return context
+ 
+ 
+ def ssl_wrap_socket(
+     sock,
+     keyfile=None,
+     certfile=None,
+     cert_reqs=None,
+     ca_certs=None,
+     server_hostname=None,
+     ssl_version=None,
+     ciphers=None,
+     ssl_context=None,
+     ca_cert_dir=None,
+     key_password=None,
+     ca_cert_data=None,
++    tls_in_tls=False,
+ ):
+     """
+     All arguments except for server_hostname, ssl_context, and ca_cert_dir have
+     the same meaning as they do when using :func:`ssl.wrap_socket`.
+ 
+     :param server_hostname:
+         When SNI is supported, the expected hostname of the certificate
+     :param ssl_context:
+@@ -325,35 +353,31 @@ def ssl_wrap_socket(
+         A directory containing CA certificates in multiple separate files, as
+         supported by OpenSSL's -CApath flag or the capath argument to
+         SSLContext.load_verify_locations().
+     :param key_password:
+         Optional password if the keyfile is encrypted.
+     :param ca_cert_data:
+         Optional string containing CA certificates in PEM format suitable for
+         passing as the cadata parameter to SSLContext.load_verify_locations()
++    :param tls_in_tls:
++        Use SSLTransport to wrap the existing socket.
+     """
+     context = ssl_context
+     if context is None:
+         # Note: This branch of code and all the variables in it are no longer
+         # used by urllib3 itself. We should consider deprecating and removing
+         # this code.
+         context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers)
+ 
+     if ca_certs or ca_cert_dir or ca_cert_data:
+         try:
+             context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data)
+-        except IOError as e:  # Platform-specific: Python 2.7
++        except (IOError, OSError) as e:
+             raise SSLError(e)
+-        # Py33 raises FileNotFoundError which subclasses OSError
+-        # These are not equivalent unless we check the errno attribute
+-        except OSError as e:  # Platform-specific: Python 3.3 and beyond
+-            if e.errno == errno.ENOENT:
+-                raise SSLError(e)
+-            raise
+ 
+     elif ssl_context is None and hasattr(context, "load_default_certs"):
+         # try to load OS default certs; works well on Windows (require Python3.4+)
+         context.load_default_certs()
+ 
+     # Attempt to detect if we get the goofy behavior of the
+     # keyfile being encrypted and OpenSSL asking for the
+     # passphrase via the terminal and instead error out.
+@@ -361,38 +385,49 @@ def ssl_wrap_socket(
+         raise SSLError("Client private key is encrypted, password is required")
+ 
+     if certfile:
+         if key_password is None:
+             context.load_cert_chain(certfile, keyfile)
+         else:
+             context.load_cert_chain(certfile, keyfile, key_password)
+ 
++    try:
++        if hasattr(context, "set_alpn_protocols"):
++            context.set_alpn_protocols(ALPN_PROTOCOLS)
++    except NotImplementedError:
++        pass
++
+     # If we detect server_hostname is an IP address then the SNI
+     # extension should not be used according to RFC3546 Section 3.1
+-    # We shouldn't warn the user if SNI isn't available but we would
+-    # not be using SNI anyways due to IP address for server_hostname.
+-    if (
+-        server_hostname is not None and not is_ipaddress(server_hostname)
+-    ) or IS_SECURETRANSPORT:
+-        if HAS_SNI and server_hostname is not None:
+-            return context.wrap_socket(sock, server_hostname=server_hostname)
+-
++    use_sni_hostname = server_hostname and not is_ipaddress(server_hostname)
++    # SecureTransport uses server_hostname in certificate verification.
++    send_sni = (use_sni_hostname and HAS_SNI) or (
++        IS_SECURETRANSPORT and server_hostname
++    )
++    # Do not warn the user if server_hostname is an invalid SNI hostname.
++    if not HAS_SNI and use_sni_hostname:
+         warnings.warn(
+             "An HTTPS request has been made, but the SNI (Server Name "
+             "Indication) extension to TLS is not available on this platform. "
+             "This may cause the server to present an incorrect TLS "
+             "certificate, which can cause validation failures. You can upgrade to "
+             "a newer version of Python to solve this. For more information, see "
+             "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
+             "#ssl-warnings",
+             SNIMissingWarning,
+         )
+ 
+-    return context.wrap_socket(sock)
++    if send_sni:
++        ssl_sock = _ssl_wrap_socket_impl(
++            sock, context, tls_in_tls, server_hostname=server_hostname
++        )
++    else:
++        ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls)
++    return ssl_sock
+ 
+ 
+ def is_ipaddress(hostname):
+     """Detects whether the hostname given is an IPv4 or IPv6 address.
+     Also detects IPv6 addresses with Zone IDs.
+ 
+     :param str hostname: Hostname to examine.
+     :return: True if the hostname is an IP address, False otherwise.
+@@ -407,8 +442,25 @@ def _is_key_file_encrypted(key_file):
+     """Detects if a key file is encrypted or not."""
+     with open(key_file, "r") as f:
+         for line in f:
+             # Look for Proc-Type: 4,ENCRYPTED
+             if "ENCRYPTED" in line:
+                 return True
+ 
+     return False
++
++
++def _ssl_wrap_socket_impl(sock, ssl_context, tls_in_tls, server_hostname=None):
++    if tls_in_tls:
++        if not SSLTransport:
++            # Import error, ssl is not available.
++            raise ProxySchemeUnsupported(
++                "TLS in TLS requires support for the 'ssl' module"
++            )
++
++        SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context)
++        return SSLTransport(sock, ssl_context, server_hostname)
++
++    if server_hostname:
++        return ssl_context.wrap_socket(sock, server_hostname=server_hostname)
++    else:
++        return ssl_context.wrap_socket(sock)
+diff --git a/third_party/python/urllib3/urllib3/util/ssltransport.py b/third_party/python/urllib3/urllib3/util/ssltransport.py
+new file mode 100644
+--- /dev/null
++++ b/third_party/python/urllib3/urllib3/util/ssltransport.py
+@@ -0,0 +1,221 @@
++import io
++import socket
++import ssl
++
++from urllib3.exceptions import ProxySchemeUnsupported
++from urllib3.packages import six
++
++SSL_BLOCKSIZE = 16384
++
++
++class SSLTransport:
++    """
++    The SSLTransport wraps an existing socket and establishes an SSL connection.
++
++    Contrary to Python's implementation of SSLSocket, it allows you to chain
++    multiple TLS connections together. It's particularly useful if you need to
++    implement TLS within TLS.
++
++    The class supports most of the socket API operations.
++    """
++
++    @staticmethod
++    def _validate_ssl_context_for_tls_in_tls(ssl_context):
++        """
++        Raises a ProxySchemeUnsupported if the provided ssl_context can't be used
++        for TLS in TLS.
++
++        The only requirement is that the ssl_context provides the 'wrap_bio'
++        methods.
++        """
++
++        if not hasattr(ssl_context, "wrap_bio"):
++            if six.PY2:
++                raise ProxySchemeUnsupported(
++                    "TLS in TLS requires SSLContext.wrap_bio() which isn't "
++                    "supported on Python 2"
++                )
++            else:
++                raise ProxySchemeUnsupported(
++                    "TLS in TLS requires SSLContext.wrap_bio() which isn't "
++                    "available on non-native SSLContext"
++                )
++
++    def __init__(
++        self, socket, ssl_context, server_hostname=None, suppress_ragged_eofs=True
++    ):
++        """
++        Create an SSLTransport around socket using the provided ssl_context.
++        """
++        self.incoming = ssl.MemoryBIO()
++        self.outgoing = ssl.MemoryBIO()
++
++        self.suppress_ragged_eofs = suppress_ragged_eofs
++        self.socket = socket
++
++        self.sslobj = ssl_context.wrap_bio(
++            self.incoming, self.outgoing, server_hostname=server_hostname
++        )
++
++        # Perform initial handshake.
++        self._ssl_io_loop(self.sslobj.do_handshake)
++
++    def __enter__(self):
++        return self
++
++    def __exit__(self, *_):
++        self.close()
++
++    def fileno(self):
++        return self.socket.fileno()
++
++    def read(self, len=1024, buffer=None):
++        return self._wrap_ssl_read(len, buffer)
++
++    def recv(self, len=1024, flags=0):
++        if flags != 0:
++            raise ValueError("non-zero flags not allowed in calls to recv")
++        return self._wrap_ssl_read(len)
++
++    def recv_into(self, buffer, nbytes=None, flags=0):
++        if flags != 0:
++            raise ValueError("non-zero flags not allowed in calls to recv_into")
++        if buffer and (nbytes is None):
++            nbytes = len(buffer)
++        elif nbytes is None:
++            nbytes = 1024
++        return self.read(nbytes, buffer)
++
++    def sendall(self, data, flags=0):
++        if flags != 0:
++            raise ValueError("non-zero flags not allowed in calls to sendall")
++        count = 0
++        with memoryview(data) as view, view.cast("B") as byte_view:
++            amount = len(byte_view)
++            while count < amount:
++                v = self.send(byte_view[count:])
++                count += v
++
++    def send(self, data, flags=0):
++        if flags != 0:
++            raise ValueError("non-zero flags not allowed in calls to send")
++        response = self._ssl_io_loop(self.sslobj.write, data)
++        return response
++
++    def makefile(
++        self, mode="r", buffering=None, encoding=None, errors=None, newline=None
++    ):
++        """
++        Python's httpclient uses makefile and buffered io when reading HTTP
++        messages and we need to support it.
++
++        This is unfortunately a copy and paste of socket.py makefile with small
++        changes to point to the socket directly.
++        """
++        if not set(mode) <= {"r", "w", "b"}:
++            raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))
++
++        writing = "w" in mode
++        reading = "r" in mode or not writing
++        assert reading or writing
++        binary = "b" in mode
++        rawmode = ""
++        if reading:
++            rawmode += "r"
++        if writing:
++            rawmode += "w"
++        raw = socket.SocketIO(self, rawmode)
++        self.socket._io_refs += 1
++        if buffering is None:
++            buffering = -1
++        if buffering < 0:
++            buffering = io.DEFAULT_BUFFER_SIZE
++        if buffering == 0:
++            if not binary:
++                raise ValueError("unbuffered streams must be binary")
++            return raw
++        if reading and writing:
++            buffer = io.BufferedRWPair(raw, raw, buffering)
++        elif reading:
++            buffer = io.BufferedReader(raw, buffering)
++        else:
++            assert writing
++            buffer = io.BufferedWriter(raw, buffering)
++        if binary:
++            return buffer
++        text = io.TextIOWrapper(buffer, encoding, errors, newline)
++        text.mode = mode
++        return text
++
++    def unwrap(self):
++        self._ssl_io_loop(self.sslobj.unwrap)
++
++    def close(self):
++        self.socket.close()
++
++    def getpeercert(self, binary_form=False):
++        return self.sslobj.getpeercert(binary_form)
++
++    def version(self):
++        return self.sslobj.version()
++
++    def cipher(self):
++        return self.sslobj.cipher()
++
++    def selected_alpn_protocol(self):
++        return self.sslobj.selected_alpn_protocol()
++
++    def selected_npn_protocol(self):
++        return self.sslobj.selected_npn_protocol()
++
++    def shared_ciphers(self):
++        return self.sslobj.shared_ciphers()
++
++    def compression(self):
++        return self.sslobj.compression()
++
++    def settimeout(self, value):
++        self.socket.settimeout(value)
++
++    def gettimeout(self):
++        return self.socket.gettimeout()
++
++    def _decref_socketios(self):
++        self.socket._decref_socketios()
++
++    def _wrap_ssl_read(self, len, buffer=None):
++        try:
++            return self._ssl_io_loop(self.sslobj.read, len, buffer)
++        except ssl.SSLError as e:
++            if e.errno == ssl.SSL_ERROR_EOF and self.suppress_ragged_eofs:
++                return 0  # eof, return 0.
++            else:
++                raise
++
++    def _ssl_io_loop(self, func, *args):
++        """ Performs an I/O loop between incoming/outgoing and the socket."""
++        should_loop = True
++        ret = None
++
++        while should_loop:
++            errno = None
++            try:
++                ret = func(*args)
++            except ssl.SSLError as e:
++                if e.errno not in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE):
++                    # WANT_READ, and WANT_WRITE are expected, others are not.
++                    raise e
++                errno = e.errno
++
++            buf = self.outgoing.read()
++            self.socket.sendall(buf)
++
++            if errno is None:
++                should_loop = False
++            elif errno == ssl.SSL_ERROR_WANT_READ:
++                buf = self.socket.recv(SSL_BLOCKSIZE)
++                if buf:
++                    self.incoming.write(buf)
++                else:
++                    self.incoming.write_eof()
++        return ret
+diff --git a/third_party/python/urllib3/urllib3/util/timeout.py b/third_party/python/urllib3/urllib3/util/timeout.py
+--- a/third_party/python/urllib3/urllib3/util/timeout.py
++++ b/third_party/python/urllib3/urllib3/util/timeout.py
+@@ -1,74 +1,81 @@
+ from __future__ import absolute_import
+ 
++import time
++
+ # The default socket timeout, used by httplib to indicate that no timeout was
+ # specified by the user
+ from socket import _GLOBAL_DEFAULT_TIMEOUT
+-import time
+ 
+ from ..exceptions import TimeoutStateError
+ 
+ # A sentinel value to indicate that no timeout was specified by the user in
+ # urllib3
+ _Default = object()
+ 
+ 
+ # Use time.monotonic if available.
+ current_time = getattr(time, "monotonic", time.time)
+ 
+ 
+ class Timeout(object):
+-    """ Timeout configuration.
++    """Timeout configuration.
+ 
+-    Timeouts can be defined as a default for a pool::
++    Timeouts can be defined as a default for a pool:
+ 
+-        timeout = Timeout(connect=2.0, read=7.0)
+-        http = PoolManager(timeout=timeout)
+-        response = http.request('GET', 'http://example.com/')
++    .. code-block:: python
++
++       timeout = Timeout(connect=2.0, read=7.0)
++       http = PoolManager(timeout=timeout)
++       response = http.request('GET', 'http://example.com/')
+ 
+-    Or per-request (which overrides the default for the pool)::
++    Or per-request (which overrides the default for the pool):
+ 
+-        response = http.request('GET', 'http://example.com/', timeout=Timeout(10))
++    .. code-block:: python
++
++       response = http.request('GET', 'http://example.com/', timeout=Timeout(10))
+ 
+-    Timeouts can be disabled by setting all the parameters to ``None``::
++    Timeouts can be disabled by setting all the parameters to ``None``:
+ 
+-        no_timeout = Timeout(connect=None, read=None)
+-        response = http.request('GET', 'http://example.com/, timeout=no_timeout)
++    .. code-block:: python
++
++       no_timeout = Timeout(connect=None, read=None)
++       response = http.request('GET', 'http://example.com/, timeout=no_timeout)
+ 
+ 
+     :param total:
+         This combines the connect and read timeouts into one; the read timeout
+         will be set to the time leftover from the connect attempt. In the
+         event that both a connect timeout and a total are specified, or a read
+         timeout and a total are specified, the shorter timeout will be applied.
+ 
+         Defaults to None.
+ 
+-    :type total: integer, float, or None
++    :type total: int, float, or None
+ 
+     :param connect:
+         The maximum amount of time (in seconds) to wait for a connection
+         attempt to a server to succeed. Omitting the parameter will default the
+         connect timeout to the system default, probably `the global default
+         timeout in socket.py
+         <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
+         None will set an infinite timeout for connection attempts.
+ 
+-    :type connect: integer, float, or None
++    :type connect: int, float, or None
+ 
+     :param read:
+         The maximum amount of time (in seconds) to wait between consecutive
+         read operations for a response from the server. Omitting the parameter
+         will default the read timeout to the system default, probably `the
+         global default timeout in socket.py
+         <http://hg.python.org/cpython/file/603b4d593758/Lib/socket.py#l535>`_.
+         None will set an infinite timeout.
+ 
+-    :type read: integer, float, or None
++    :type read: int, float, or None
+ 
+     .. note::
+ 
+         Many factors can affect the total amount of time for urllib3 to return
+         an HTTP response.
+ 
+         For example, Python's DNS resolver does not obey the timeout specified
+         on the socket. Other factors that can affect total request time include
+@@ -106,17 +113,17 @@ class Timeout(object):
+             self.total,
+         )
+ 
+     # __str__ provided for backwards compatibility
+     __str__ = __repr__
+ 
+     @classmethod
+     def _validate_timeout(cls, value, name):
+-        """ Check that a timeout attribute is valid.
++        """Check that a timeout attribute is valid.
+ 
+         :param value: The timeout value to validate
+         :param name: The name of the timeout attribute to validate. This is
+             used to specify in error messages.
+         :return: The validated and casted version of the given value.
+         :raises ValueError: If it is a numeric value less than or equal to
+             zero, or the type is not an integer, float, or None.
+         """
+@@ -152,72 +159,72 @@ class Timeout(object):
+                 "Timeout value %s was %s, but it must be an "
+                 "int, float or None." % (name, value)
+             )
+ 
+         return value
+ 
+     @classmethod
+     def from_float(cls, timeout):
+-        """ Create a new Timeout from a legacy timeout value.
++        """Create a new Timeout from a legacy timeout value.
+ 
+         The timeout value used by httplib.py sets the same timeout on the
+         connect(), and recv() socket requests. This creates a :class:`Timeout`
+         object that sets the individual timeouts to the ``timeout`` value
+         passed to this function.
+ 
+         :param timeout: The legacy timeout value.
+         :type timeout: integer, float, sentinel default object, or None
+         :return: Timeout object
+         :rtype: :class:`Timeout`
+         """
+         return Timeout(read=timeout, connect=timeout)
+ 
+     def clone(self):
+-        """ Create a copy of the timeout object
++        """Create a copy of the timeout object
+ 
+         Timeout properties are stored per-pool but each request needs a fresh
+         Timeout object to ensure each one has its own start/stop configured.
+ 
+         :return: a copy of the timeout object
+         :rtype: :class:`Timeout`
+         """
+         # We can't use copy.deepcopy because that will also create a new object
+         # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to
+         # detect the user default.
+         return Timeout(connect=self._connect, read=self._read, total=self.total)
+ 
+     def start_connect(self):
+-        """ Start the timeout clock, used during a connect() attempt
++        """Start the timeout clock, used during a connect() attempt
+ 
+         :raises urllib3.exceptions.TimeoutStateError: if you attempt
+             to start a timer that has been started already.
+         """
+         if self._start_connect is not None:
+             raise TimeoutStateError("Timeout timer has already been started.")
+         self._start_connect = current_time()
+         return self._start_connect
+ 
+     def get_connect_duration(self):
+-        """ Gets the time elapsed since the call to :meth:`start_connect`.
++        """Gets the time elapsed since the call to :meth:`start_connect`.
+ 
+         :return: Elapsed time in seconds.
+         :rtype: float
+         :raises urllib3.exceptions.TimeoutStateError: if you attempt
+             to get duration for a timer that hasn't been started.
+         """
+         if self._start_connect is None:
+             raise TimeoutStateError(
+                 "Can't get connect duration for timer that has not started."
+             )
+         return current_time() - self._start_connect
+ 
+     @property
+     def connect_timeout(self):
+-        """ Get the value to use when setting a connection timeout.
++        """Get the value to use when setting a connection timeout.
+ 
+         This will be a positive float or integer, the value None
+         (never timeout), or the default system timeout.
+ 
+         :return: Connect timeout.
+         :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
+         """
+         if self.total is None:
+@@ -225,17 +232,17 @@ class Timeout(object):
+ 
+         if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
+             return self.total
+ 
+         return min(self._connect, self.total)
+ 
+     @property
+     def read_timeout(self):
+-        """ Get the value for the read timeout.
++        """Get the value for the read timeout.
+ 
+         This assumes some time has elapsed in the connection timeout and
+         computes the read timeout appropriately.
+ 
+         If self.total is set, the read timeout is dependent on the amount of
+         time taken by the connect timeout. If the connection time has not been
+         established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be
+         raised.
+diff --git a/third_party/python/urllib3/urllib3/util/url.py b/third_party/python/urllib3/urllib3/util/url.py
+--- a/third_party/python/urllib3/urllib3/util/url.py
++++ b/third_party/python/urllib3/urllib3/util/url.py
+@@ -1,16 +1,16 @@
+ from __future__ import absolute_import
++
+ import re
+ from collections import namedtuple
+ 
+ from ..exceptions import LocationParseError
+ from ..packages import six
+ 
+-
+ url_attrs = ["scheme", "auth", "host", "port", "path", "query", "fragment"]
+ 
+ # We only want to normalize urls with an HTTP(S) scheme.
+ # urllib3 infers URLs without a scheme (None) to be http.
+ NORMALIZABLE_SCHEMES = ("http", "https", None)
+ 
+ # Almost all of these patterns were derived from the
+ # 'rfc3986' module: https://github.com/python-hyper/rfc3986
+diff --git a/third_party/python/urllib3/urllib3/util/wait.py b/third_party/python/urllib3/urllib3/util/wait.py
+--- a/third_party/python/urllib3/urllib3/util/wait.py
++++ b/third_party/python/urllib3/urllib3/util/wait.py
+@@ -1,12 +1,12 @@
+ import errno
+-from functools import partial
+ import select
+ import sys
++from functools import partial
+ 
+ try:
+     from time import monotonic
+ except ImportError:
+     from time import time as monotonic
+ 
+ __all__ = ["NoWayToWaitForSocketError", "wait_for_read", "wait_for_write"]
+ 
+@@ -135,19 +135,19 @@ def wait_for_socket(*args, **kwargs):
+     elif hasattr(select, "select"):
+         wait_for_socket = select_wait_for_socket
+     else:  # Platform-specific: Appengine.
+         wait_for_socket = null_wait_for_socket
+     return wait_for_socket(*args, **kwargs)
+ 
+ 
+ def wait_for_read(sock, timeout=None):
+-    """ Waits for reading to be available on a given socket.
++    """Waits for reading to be available on a given socket.
+     Returns True if the socket is readable, or False if the timeout expired.
+     """
+     return wait_for_socket(sock, read=True, timeout=timeout)
+ 
+ 
+ def wait_for_write(sock, timeout=None):
+-    """ Waits for writing to be available on a given socket.
++    """Waits for writing to be available on a given socket.
+     Returns True if the socket is readable, or False if the timeout expired.
+     """
+     return wait_for_socket(sock, write=True, timeout=timeout)
+diff --git a/tools/moztreedocs/requirements.in.1715900.later b/tools/moztreedocs/requirements.in.1715900.later
+new file mode 100644
+--- /dev/null
++++ b/tools/moztreedocs/requirements.in.1715900.later
+@@ -0,0 +1,19 @@
++--- requirements.in
+++++ requirements.in
++@@ -1,14 +1,14 @@
++ # pip freeze > requirements.in
++ alabaster==0.7.12
++ Babel==2.8.0
++ backports-abc==0.5
++-boto3==1.15.6
++-botocore==1.18.6
+++boto3==1.16.63
+++botocore==1.19.63
++ certifi==2020.6.20
++ chardet==3.0.4
++ colorama==0.4.4
++ commonmark==0.9.1
++ docutils==0.16
++ fluent.pygments==1.0
++ fluent.syntax==0.18.1
++ idna==2.10
+diff --git a/tools/moztreedocs/requirements.txt.1715900.later b/tools/moztreedocs/requirements.txt.1715900.later
+new file mode 100644
+--- /dev/null
++++ b/tools/moztreedocs/requirements.txt.1715900.later
+@@ -0,0 +1,32 @@
++--- requirements.txt
+++++ requirements.txt
++@@ -21,23 +21,23 @@ babel==2.8.0 \
++     --hash=sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4
++     # via
++     #   -r requirements.in
++     #   sphinx
++ backports-abc==0.5 \
++     --hash=sha256:033be54514a03e255df75c5aee8f9e672f663f93abb723444caec8fe43437bde \
++     --hash=sha256:52089f97fe7a9aa0d3277b220c1d730a85aefd64e1b2664696fe35317c5470a7
++     # via -r requirements.in
++-boto3==1.15.6 \
++-    --hash=sha256:87534080a5addad135fcd631fa8b57a12e1a234c23d86521e84fbbd9217fd6a5 \
++-    --hash=sha256:c4c84c6647e84a9f270d86da7eea1a250c2529e26ddb39320546f235327f10e6
+++boto3==1.16.63 \
+++    --hash=sha256:1c0003609e63e8cff51dee7a49e904bcdb20e140b5f7a10a03006289fd8c8dc1 \
+++    --hash=sha256:c919dac9773115025e1e2a7e462f60ca082e322bb6f4354247523e4226133b0b
++     # via -r requirements.in
++-botocore==1.18.6 \
++-    --hash=sha256:31f04b68a6ebe8cfa97b4d70f54f29aef8b6a0bc9c4da7b8ee9b6a53fc69edae \
++-    --hash=sha256:3de32a03679bb172a41c38e3c9af3f7259f3637f705aa2ac384b3233dc985b85
+++botocore==1.19.63 \
+++    --hash=sha256:ad4adfcc195b5401d84b0c65d3a89e507c1d54c201879c8761ff10ef5c361e21 \
+++    --hash=sha256:d3694f6ef918def8082513e5ef309cd6cd83b612e9984e3a66e8adc98c650a92
++     # via
++     #   -r requirements.in
++     #   boto3
++     #   s3transfer
++ certifi==2020.6.20 \
++     --hash=sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3 \
++     --hash=sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41
++     # via

+ 3548 - 0
mozilla-release/patches/1857492-120a1.patch

@@ -0,0 +1,3548 @@
+# HG changeset patch
+# User serge-sans-paille <sguelton@mozilla.com>
+# Date 1697463324 0
+# Node ID 47b8e4dba076af433fe0755cfc02497cdaa6f346
+# Parent  cd962c95e9f1332303c580b46ceb671222d0b32b
+Bug 1857492 - Upgrade vendored version of six and urllib3 r=saschanaz
+
+six -> 1.16
+urllib3 -> 1.26.17
+
+Differential Revision: https://phabricator.services.mozilla.com/D190288
+
+diff --git a/third_party/python/poetry.lock.1857492.later b/third_party/python/poetry.lock.1857492.later
+new file mode 100644
+--- /dev/null
++++ b/third_party/python/poetry.lock.1857492.later
+@@ -0,0 +1,78 @@
++--- poetry.lock
+++++ poetry.lock
++@@ -1328,24 +1328,24 @@ files = [
++ 
++ [package.extras]
++ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"]
++ testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
++ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
++ 
++ [[package]]
++ name = "six"
++-version = "1.13.0"
+++version = "1.16.0"
++ description = "Python 2 and 3 compatibility utilities"
++ category = "main"
++ optional = false
++-python-versions = ">=2.6, !=3.0.*, !=3.1.*"
+++python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
++ files = [
++-    {file = "six-1.13.0-py2.py3-none-any.whl", hash = "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd"},
++-    {file = "six-1.13.0.tar.gz", hash = "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66"},
+++    {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
+++    {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
++ ]
++ 
++ [[package]]
++ name = "slugid"
++ version = "2.0.0"
++ description = "Base64 encoded uuid v4 slugs"
++ category = "main"
++ optional = false
++@@ -1486,29 +1486,29 @@ python-versions = "*"
++ files = [
++     {file = "typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"},
++     {file = "typing_extensions-3.10.0.0-py3-none-any.whl", hash = "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"},
++     {file = "typing_extensions-3.10.0.0.tar.gz", hash = "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342"},
++ ]
++ 
++ [[package]]
++ name = "urllib3"
++-version = "1.26.0"
+++version = "1.26.17"
++ description = "HTTP library with thread-safe connection pooling, file post, and more."
++ category = "main"
++ optional = false
++-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4"
+++python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
++ files = [
++-    {file = "urllib3-1.26.0-py2.py3-none-any.whl", hash = "sha256:bad31cb622ceee0ab46c4c884cf61957def0ff2e644de0a7a093678844c9ccac"},
++-    {file = "urllib3-1.26.0.tar.gz", hash = "sha256:4849f132941d68144df0a3785ccc4fe423430ba5db0108d045c8cadbc90f517a"},
+++    {file = "urllib3-1.26.17-py2.py3-none-any.whl", hash = "sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b"},
+++    {file = "urllib3-1.26.17.tar.gz", hash = "sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21"},
++ ]
++ 
++ [package.extras]
++-brotli = ["brotlipy (>=0.6.0)"]
++-secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"]
+++brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
+++secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
++ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
++ 
++ [[package]]
++ name = "voluptuous"
++ version = "0.12.1"
++ description = ""
++ category = "main"
++ optional = false
++@@ -1627,9 +1627,9 @@ files = [
++ 
++ [package.extras]
++ docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"]
++ testing = ["func-timeout", "jaraco.itertools", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=1.2.3)", "pytest-cov", "pytest-enabler", "pytest-flake8", "pytest-mypy"]
++ 
++ [metadata]
++ lock-version = "2.0"
++ python-versions = "^3.7"
++-content-hash = "b89a981fdfc24399f58ed31b43fc5e89ffc15908642055a6216fea9615eea0af"
+++content-hash = "362f79e0ac281ba4f9175ed68dc2925aac1403215ffe4ef462f35e8ded19fc05"
+diff --git a/third_party/python/requirements.in b/third_party/python/requirements.in
+--- a/third_party/python/requirements.in
++++ b/third_party/python/requirements.in
+@@ -34,12 +34,12 @@ ply==3.10
+ pyasn1==0.4.8
+ pytest==3.6.2
+ python-hglib==2.4
+ pytoml==0.1.10
+ pyyaml==5.4.1
+ redo==2.0.3
+ requests==2.25.1
+ responses==0.10.6
+-six==1.13.0
+-urllib3==1.26
++six==1.16.0
++urllib3==1.26.17
+ voluptuous==0.11.5
+ yamllint==1.23
+diff --git a/third_party/python/requirements.txt b/third_party/python/requirements.txt
+--- a/third_party/python/requirements.txt
++++ b/third_party/python/requirements.txt
+@@ -185,31 +185,31 @@ requests==2.25.1 \
+     --hash=sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e
+     # via
+     #   -r requirements-mach-vendor-python.in
+     #   responses
+ responses==0.10.6 \
+     --hash=sha256:502d9c0c8008439cfcdef7e251f507fcfdd503b56e8c0c87c3c3e3393953f790 \
+     --hash=sha256:97193c0183d63fba8cd3a041c75464e4b09ea0aff6328800d1546598567dde0b
+     # via -r requirements-mach-vendor-python.in
+-six==1.13.0 \
+-    --hash=sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd \
+-    --hash=sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66
++six==1.16.0 ; python_version >= "3.7" and python_version < "4.0" \
++    --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
++    --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
+     # via
+     #   -r requirements-mach-vendor-python.in
+     #   blessings
+     #   compare-locales
+     #   ecdsa
+     #   fluent.migrate
+     #   more-itertools
+     #   pytest
+     #   responses
+-urllib3==1.26.0 \
+-    --hash=sha256:4849f132941d68144df0a3785ccc4fe423430ba5db0108d045c8cadbc90f517a \
+-    --hash=sha256:bad31cb622ceee0ab46c4c884cf61957def0ff2e644de0a7a093678844c9ccac
++urllib3==1.26.17 ; python_version >= "3.7" and python_version < "4.0" \
++    --hash=sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21 \
++    --hash=sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b
+     # via
+     #   -r requirements-mach-vendor-python.in
+     #   requests
+ voluptuous==0.11.5 \
+     --hash=sha256:303542b3fc07fb52ec3d7a1c614b329cdbee13a9d681935353d8ea56a7bfa9f1 \
+     --hash=sha256:567a56286ef82a9d7ae0628c5842f65f516abcb496e74f3f59f1d7b28df314ef
+     # via -r requirements-mach-vendor-python.in
+ yamllint==1.23 \
+diff --git a/third_party/python/six/six-1.13.0.dist-info/RECORD b/third_party/python/six/six-1.13.0.dist-info/RECORD
+deleted file mode 100644
+--- a/third_party/python/six/six-1.13.0.dist-info/RECORD
++++ /dev/null
+@@ -1,6 +0,0 @@
+-six.py,sha256=bsEzSFTZTx49wQttLORmSZTrpjGc8UbXt-HBa_LZX7Q,33045
+-six-1.13.0.dist-info/LICENSE,sha256=t1KbjAcXGniow2wyg5BVKOSBKUXZd9El65JujMvyRbY,1066
+-six-1.13.0.dist-info/METADATA,sha256=hxS4rSPRfO8ewbcLS30anoFi6LFgUQ3mk_xknZ8RV4w,1940
+-six-1.13.0.dist-info/WHEEL,sha256=8zNYZbwQSXoB9IfXOjPfeNwvAsALAjffgk27FqvCWbo,110
+-six-1.13.0.dist-info/top_level.txt,sha256=_iVH_iYEtEXnD8nYGQYpYFUvkUW9sEO1GYbkeKSAais,4
+-six-1.13.0.dist-info/RECORD,,
+diff --git a/third_party/python/six/six-1.13.0.dist-info/LICENSE b/third_party/python/six/six-1.16.0.dist-info/LICENSE
+rename from third_party/python/six/six-1.13.0.dist-info/LICENSE
+rename to third_party/python/six/six-1.16.0.dist-info/LICENSE
+--- a/third_party/python/six/six-1.13.0.dist-info/LICENSE
++++ b/third_party/python/six/six-1.16.0.dist-info/LICENSE
+@@ -1,9 +1,9 @@
+-Copyright (c) 2010-2019 Benjamin Peterson
++Copyright (c) 2010-2020 Benjamin Peterson
+ 
+ Permission is hereby granted, free of charge, to any person obtaining a copy of
+ this software and associated documentation files (the "Software"), to deal in
+ the Software without restriction, including without limitation the rights to
+ use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+ the Software, and to permit persons to whom the Software is furnished to do so,
+ subject to the following conditions:
+ 
+diff --git a/third_party/python/six/six-1.13.0.dist-info/METADATA b/third_party/python/six/six-1.16.0.dist-info/METADATA
+rename from third_party/python/six/six-1.13.0.dist-info/METADATA
+rename to third_party/python/six/six-1.16.0.dist-info/METADATA
+--- a/third_party/python/six/six-1.13.0.dist-info/METADATA
++++ b/third_party/python/six/six-1.16.0.dist-info/METADATA
+@@ -1,25 +1,25 @@
+ Metadata-Version: 2.1
+ Name: six
+-Version: 1.13.0
++Version: 1.16.0
+ Summary: Python 2 and 3 compatibility utilities
+ Home-page: https://github.com/benjaminp/six
+ Author: Benjamin Peterson
+ Author-email: benjamin@python.org
+ License: MIT
+ Platform: UNKNOWN
+ Classifier: Development Status :: 5 - Production/Stable
+ Classifier: Programming Language :: Python :: 2
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Intended Audience :: Developers
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Topic :: Software Development :: Libraries
+ Classifier: Topic :: Utilities
+-Requires-Python: >=2.6, !=3.0.*, !=3.1.*
++Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*
+ 
+ .. image:: https://img.shields.io/pypi/v/six.svg
+    :target: https://pypi.org/project/six/
+    :alt: six on PyPI
+ 
+ .. image:: https://travis-ci.org/benjaminp/six.svg?branch=master
+    :target: https://travis-ci.org/benjaminp/six
+    :alt: six on TravisCI
+@@ -32,21 +32,18 @@ Requires-Python: >=2.6, !=3.0.*, !=3.1.*
+    :target: https://github.com/benjaminp/six/blob/master/LICENSE
+    :alt: MIT License badge
+ 
+ Six is a Python 2 and 3 compatibility library.  It provides utility functions
+ for smoothing over the differences between the Python versions with the goal of
+ writing Python code that is compatible on both Python versions.  See the
+ documentation for more information on what is provided.
+ 
+-Six supports every Python version since 2.6.  It is contained in only one Python
++Six supports Python 2.7 and 3.3+.  It is contained in only one Python
+ file, so it can be easily copied into your project. (The copyright and license
+ notice must be retained.)
+ 
+ Online documentation is at https://six.readthedocs.io/.
+ 
+ Bugs can be reported to https://github.com/benjaminp/six.  The code can also
+ be found there.
+ 
+-For questions about six or porting in general, email the python-porting mailing
+-list: https://mail.python.org/mailman/listinfo/python-porting
+ 
+-
+diff --git a/third_party/python/six/six-1.16.0.dist-info/RECORD b/third_party/python/six/six-1.16.0.dist-info/RECORD
+new file mode 100644
+--- /dev/null
++++ b/third_party/python/six/six-1.16.0.dist-info/RECORD
+@@ -0,0 +1,6 @@
++six.py,sha256=TOOfQi7nFGfMrIvtdr6wX4wyHH8M7aknmuLfo2cBBrM,34549
++six-1.16.0.dist-info/LICENSE,sha256=i7hQxWWqOJ_cFvOkaWWtI9gq3_YPI5P8J2K2MYXo5sk,1066
++six-1.16.0.dist-info/METADATA,sha256=VQcGIFCAEmfZcl77E5riPCN4v2TIsc_qtacnjxKHJoI,1795
++six-1.16.0.dist-info/WHEEL,sha256=Z-nyYpwrcSqxfdux5Mbn_DQ525iP7J2DG3JgGvOYyTQ,110
++six-1.16.0.dist-info/top_level.txt,sha256=_iVH_iYEtEXnD8nYGQYpYFUvkUW9sEO1GYbkeKSAais,4
++six-1.16.0.dist-info/RECORD,,
+diff --git a/third_party/python/six/six-1.13.0.dist-info/WHEEL b/third_party/python/six/six-1.16.0.dist-info/WHEEL
+rename from third_party/python/six/six-1.13.0.dist-info/WHEEL
+rename to third_party/python/six/six-1.16.0.dist-info/WHEEL
+--- a/third_party/python/six/six-1.13.0.dist-info/WHEEL
++++ b/third_party/python/six/six-1.16.0.dist-info/WHEEL
+@@ -1,6 +1,6 @@
+ Wheel-Version: 1.0
+-Generator: bdist_wheel (0.33.6)
++Generator: bdist_wheel (0.36.2)
+ Root-Is-Purelib: true
+ Tag: py2-none-any
+ Tag: py3-none-any
+ 
+diff --git a/third_party/python/six/six-1.13.0.dist-info/top_level.txt b/third_party/python/six/six-1.16.0.dist-info/top_level.txt
+rename from third_party/python/six/six-1.13.0.dist-info/top_level.txt
+rename to third_party/python/six/six-1.16.0.dist-info/top_level.txt
+diff --git a/third_party/python/six/six.py b/third_party/python/six/six.py
+--- a/third_party/python/six/six.py
++++ b/third_party/python/six/six.py
+@@ -1,9 +1,9 @@
+-# Copyright (c) 2010-2019 Benjamin Peterson
++# Copyright (c) 2010-2020 Benjamin Peterson
+ #
+ # Permission is hereby granted, free of charge, to any person obtaining a copy
+ # of this software and associated documentation files (the "Software"), to deal
+ # in the Software without restriction, including without limitation the rights
+ # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ # copies of the Software, and to permit persons to whom the Software is
+ # furnished to do so, subject to the following conditions:
+ #
+@@ -24,17 +24,17 @@ from __future__ import absolute_import
+ 
+ import functools
+ import itertools
+ import operator
+ import sys
+ import types
+ 
+ __author__ = "Benjamin Peterson <benjamin@python.org>"
+-__version__ = "1.13.0"
++__version__ = "1.16.0"
+ 
+ 
+ # Useful for very coarse version differentiation.
+ PY2 = sys.version_info[0] == 2
+ PY3 = sys.version_info[0] == 3
+ PY34 = sys.version_info[0:2] >= (3, 4)
+ 
+ if PY3:
+@@ -66,16 +66,21 @@ else:
+         except OverflowError:
+             # 32-bit
+             MAXSIZE = int((1 << 31) - 1)
+         else:
+             # 64-bit
+             MAXSIZE = int((1 << 63) - 1)
+         del X
+ 
++if PY34:
++    from importlib.util import spec_from_loader
++else:
++    spec_from_loader = None
++
+ 
+ def _add_doc(func, doc):
+     """Add documentation to a function."""
+     func.__doc__ = doc
+ 
+ 
+ def _import_module(name):
+     """Import module, returning the module after the last dot."""
+@@ -181,16 +186,21 @@ class _SixMetaPathImporter(object):
+     def _get_module(self, fullname):
+         return self.known_modules[self.name + "." + fullname]
+ 
+     def find_module(self, fullname, path=None):
+         if fullname in self.known_modules:
+             return self
+         return None
+ 
++    def find_spec(self, fullname, path, target=None):
++        if fullname in self.known_modules:
++            return spec_from_loader(fullname, self)
++        return None
++
+     def __get_module(self, fullname):
+         try:
+             return self.known_modules[fullname]
+         except KeyError:
+             raise ImportError("This loader does not know module " + fullname)
+ 
+     def load_module(self, fullname):
+         try:
+@@ -218,16 +228,22 @@ class _SixMetaPathImporter(object):
+     def get_code(self, fullname):
+         """Return None
+ 
+         Required, if is_package is implemented"""
+         self.__get_module(fullname)  # eventually raises ImportError
+         return None
+     get_source = get_code  # same as get_code
+ 
++    def create_module(self, spec):
++        return self.load_module(spec.name)
++
++    def exec_module(self, module):
++        pass
++
+ _importer = _SixMetaPathImporter(__name__)
+ 
+ 
+ class _MovedItems(_LazyModule):
+ 
+     """Lazy loading of moved objects"""
+     __path__ = []  # mark as package
+ 
+@@ -254,17 +270,17 @@ class _MovedItems(_LazyModule):
+     MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+     MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
+     MovedModule("builtins", "__builtin__"),
+     MovedModule("configparser", "ConfigParser"),
+     MovedModule("collections_abc", "collections", "collections.abc" if sys.version_info >= (3, 3) else "collections"),
+     MovedModule("copyreg", "copy_reg"),
+     MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
+     MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"),
+-    MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
++    MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread" if sys.version_info < (3, 9) else "_thread"),
+     MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+     MovedModule("http_cookies", "Cookie", "http.cookies"),
+     MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+     MovedModule("html_parser", "HTMLParser", "html.parser"),
+     MovedModule("http_client", "httplib", "http.client"),
+     MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+     MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
+     MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+@@ -639,19 +655,21 @@ if PY3:
+     import io
+     StringIO = io.StringIO
+     BytesIO = io.BytesIO
+     del io
+     _assertCountEqual = "assertCountEqual"
+     if sys.version_info[1] <= 1:
+         _assertRaisesRegex = "assertRaisesRegexp"
+         _assertRegex = "assertRegexpMatches"
++        _assertNotRegex = "assertNotRegexpMatches"
+     else:
+         _assertRaisesRegex = "assertRaisesRegex"
+         _assertRegex = "assertRegex"
++        _assertNotRegex = "assertNotRegex"
+ else:
+     def b(s):
+         return s
+     # Workaround for standalone backslash
+ 
+     def u(s):
+         return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
+     unichr = unichr
+@@ -663,32 +681,37 @@ else:
+     def indexbytes(buf, i):
+         return ord(buf[i])
+     iterbytes = functools.partial(itertools.imap, ord)
+     import StringIO
+     StringIO = BytesIO = StringIO.StringIO
+     _assertCountEqual = "assertItemsEqual"
+     _assertRaisesRegex = "assertRaisesRegexp"
+     _assertRegex = "assertRegexpMatches"
++    _assertNotRegex = "assertNotRegexpMatches"
+ _add_doc(b, """Byte literal""")
+ _add_doc(u, """Text literal""")
+ 
+ 
+ def assertCountEqual(self, *args, **kwargs):
+     return getattr(self, _assertCountEqual)(*args, **kwargs)
+ 
+ 
+ def assertRaisesRegex(self, *args, **kwargs):
+     return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+ 
+ 
+ def assertRegex(self, *args, **kwargs):
+     return getattr(self, _assertRegex)(*args, **kwargs)
+ 
+ 
++def assertNotRegex(self, *args, **kwargs):
++    return getattr(self, _assertNotRegex)(*args, **kwargs)
++
++
+ if PY3:
+     exec_ = getattr(moves.builtins, "exec")
+ 
+     def reraise(tp, value, tb=None):
+         try:
+             if value is None:
+                 value = tp()
+             if value.__traceback__ is not tb:
+@@ -714,26 +737,17 @@ else:
+     exec_("""def reraise(tp, value, tb=None):
+     try:
+         raise tp, value, tb
+     finally:
+         tb = None
+ """)
+ 
+ 
+-if sys.version_info[:2] == (3, 2):
+-    exec_("""def raise_from(value, from_value):
+-    try:
+-        if from_value is None:
+-            raise value
+-        raise value from from_value
+-    finally:
+-        value = None
+-""")
+-elif sys.version_info[:2] > (3, 2):
++if sys.version_info[:2] > (3,):
+     exec_("""def raise_from(value, from_value):
+     try:
+         raise value from from_value
+     finally:
+         value = None
+ """)
+ else:
+     def raise_from(value, from_value):
+@@ -803,23 +817,43 @@ if sys.version_info[:2] < (3, 3):
+         flush = kwargs.pop("flush", False)
+         _print(*args, **kwargs)
+         if flush and fp is not None:
+             fp.flush()
+ 
+ _add_doc(reraise, """Reraise an exception.""")
+ 
+ if sys.version_info[0:2] < (3, 4):
++    # This does exactly the same what the :func:`py3:functools.update_wrapper`
++    # function does on Python versions after 3.2. It sets the ``__wrapped__``
++    # attribute on ``wrapper`` object and it doesn't raise an error if any of
++    # the attributes mentioned in ``assigned`` and ``updated`` are missing on
++    # ``wrapped`` object.
++    def _update_wrapper(wrapper, wrapped,
++                        assigned=functools.WRAPPER_ASSIGNMENTS,
++                        updated=functools.WRAPPER_UPDATES):
++        for attr in assigned:
++            try:
++                value = getattr(wrapped, attr)
++            except AttributeError:
++                continue
++            else:
++                setattr(wrapper, attr, value)
++        for attr in updated:
++            getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
++        wrapper.__wrapped__ = wrapped
++        return wrapper
++    _update_wrapper.__doc__ = functools.update_wrapper.__doc__
++
+     def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
+               updated=functools.WRAPPER_UPDATES):
+-        def wrapper(f):
+-            f = functools.wraps(wrapped, assigned, updated)(f)
+-            f.__wrapped__ = wrapped
+-            return f
+-        return wrapper
++        return functools.partial(_update_wrapper, wrapped=wrapped,
++                                 assigned=assigned, updated=updated)
++    wraps.__doc__ = functools.wraps.__doc__
++
+ else:
+     wraps = functools.wraps
+ 
+ 
+ def with_metaclass(meta, *bases):
+     """Create a base class with a metaclass."""
+     # This requires a bit of explanation: the basic idea is to make a dummy
+     # metaclass for one level of class instantiation that replaces itself with
+@@ -867,41 +901,43 @@ def ensure_binary(s, encoding='utf-8', e
+     For Python 2:
+       - `unicode` -> encoded to `str`
+       - `str` -> `str`
+ 
+     For Python 3:
+       - `str` -> encoded to `bytes`
+       - `bytes` -> `bytes`
+     """
++    if isinstance(s, binary_type):
++        return s
+     if isinstance(s, text_type):
+         return s.encode(encoding, errors)
+-    elif isinstance(s, binary_type):
+-        return s
+-    else:
+-        raise TypeError("not expecting type '%s'" % type(s))
++    raise TypeError("not expecting type '%s'" % type(s))
+ 
+ 
+ def ensure_str(s, encoding='utf-8', errors='strict'):
+     """Coerce *s* to `str`.
+ 
+     For Python 2:
+       - `unicode` -> encoded to `str`
+       - `str` -> `str`
+ 
+     For Python 3:
+       - `str` -> `str`
+       - `bytes` -> decoded to `str`
+     """
+-    if not isinstance(s, (text_type, binary_type)):
+-        raise TypeError("not expecting type '%s'" % type(s))
++    # Optimization: Fast return for the common case.
++    if type(s) is str:
++        return s
+     if PY2 and isinstance(s, text_type):
+-        s = s.encode(encoding, errors)
++        return s.encode(encoding, errors)
+     elif PY3 and isinstance(s, binary_type):
+-        s = s.decode(encoding, errors)
++        return s.decode(encoding, errors)
++    elif not isinstance(s, (text_type, binary_type)):
++        raise TypeError("not expecting type '%s'" % type(s))
+     return s
+ 
+ 
+ def ensure_text(s, encoding='utf-8', errors='strict'):
+     """Coerce *s* to six.text_type.
+ 
+     For Python 2:
+       - `unicode` -> `unicode`
+@@ -914,20 +950,19 @@ def ensure_text(s, encoding='utf-8', err
+     if isinstance(s, binary_type):
+         return s.decode(encoding, errors)
+     elif isinstance(s, text_type):
+         return s
+     else:
+         raise TypeError("not expecting type '%s'" % type(s))
+ 
+ 
+-
+ def python_2_unicode_compatible(klass):
+     """
+-    A decorator that defines __unicode__ and __str__ methods under Python 2.
++    A class decorator that defines __unicode__ and __str__ methods under Python 2.
+     Under Python 3 it does nothing.
+ 
+     To support Python 2 and 3 with a single code base, define a __str__ method
+     returning text and apply this decorator to the class.
+     """
+     if PY2:
+         if '__str__' not in klass.__dict__:
+             raise ValueError("@python_2_unicode_compatible cannot be applied "
+diff --git a/third_party/python/urllib3/urllib3-1.26.0.dist-info/RECORD b/third_party/python/urllib3/urllib3-1.26.0.dist-info/RECORD
+deleted file mode 100644
+--- a/third_party/python/urllib3/urllib3-1.26.0.dist-info/RECORD
++++ /dev/null
+@@ -1,44 +0,0 @@
+-urllib3/__init__.py,sha256=j3yzHIbmW7CS-IKQJ9-PPQf_YKO8EOAey_rMW0UR7us,2763
+-urllib3/_collections.py,sha256=Rp1mVyBgc_UlAcp6M3at1skJBXR5J43NawRTvW2g_XY,10811
+-urllib3/_version.py,sha256=H0vLQ8PY350EPZlZQa8ri0tEjVS-xhGdQOHcU360-0A,63
+-urllib3/connection.py,sha256=BdaUSNpGzO0zq28i9MhOXb6QZspeVdVrYtjnkk2Eqg4,18396
+-urllib3/connectionpool.py,sha256=IKoeuJZY9YAYm0GK4q-MXAhyXW0M_FnvabYaNsDIR-E,37133
+-urllib3/exceptions.py,sha256=lNrKC5J8zeBXIu9SSKSNb7cLi8iXl9ARu9DHD2SflZM,7810
+-urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579
+-urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440
+-urllib3/poolmanager.py,sha256=whzlX6UTEgODMOCy0ZDMUONRBCz5wyIM8Z9opXAY-Lk,19763
+-urllib3/request.py,sha256=ZFSIqX0C6WizixecChZ3_okyu7BEv0lZu1VT0s6h4SM,5985
+-urllib3/response.py,sha256=hGhGBh7TkEkh_IQg5C1W_xuPNrgIKv5BUXPyE-q0LuE,28203
+-urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+-urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957
+-urllib3/contrib/appengine.py,sha256=7Pxb0tKfDB_LTGPERiswH0qomhDoUUOo5kwybAKLQyE,11010
+-urllib3/contrib/ntlmpool.py,sha256=6I95h1_71fzxmoMSNtY0gB8lnyCoVtP_DpqFGj14fdU,4160
+-urllib3/contrib/pyopenssl.py,sha256=vgh6j52w9xgwq-3R2kfB5M2JblQATJfKAK3lIAc1kSg,16778
+-urllib3/contrib/securetransport.py,sha256=KxGPZk8d4YepWm7Rc-SBt1XrzIfnLKc8JkUVV75XzgE,34286
+-urllib3/contrib/socks.py,sha256=DcRjM2l0rQMIyhYrN6r-tnVkY6ZTDxHJlM8_usAkGCA,7097
+-urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+-urllib3/contrib/_securetransport/bindings.py,sha256=E1_7ScsgOchfxneozbAueK7ziCwF35fna4DuDCYJ9_o,17637
+-urllib3/contrib/_securetransport/low_level.py,sha256=lgIdsSycqfB0Xm5BiJzXGeIKT7ybCQMFPJAgkcwPa1s,13908
+-urllib3/packages/__init__.py,sha256=h4BLhD4tLaBx1adaDtKXfupsgqY0wWLXb_f1_yVlV6A,108
+-urllib3/packages/six.py,sha256=adx4z-eM_D0Vvu0IIqVzFACQ_ux9l64y7DkSEfbxCDs,32536
+-urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+-urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417
+-urllib3/packages/ssl_match_hostname/__init__.py,sha256=zppezdEQdpGsYerI6mV6MfUYy495JV4mcOWC_GgbljU,757
+-urllib3/packages/ssl_match_hostname/_implementation.py,sha256=6dZ-q074g7XhsJ27MFCgkct8iVNZB3sMZvKhf-KUVy0,5679
+-urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155
+-urllib3/util/connection.py,sha256=21B-LX0c8fkxPDssyHCaK0pCnmrKmhltg5EoouHiAPU,4910
+-urllib3/util/proxy.py,sha256=FGipAEnvZteyldXNjce4DEB7YzwU-a5lep8y5S0qHQg,1604
+-urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498
+-urllib3/util/request.py,sha256=NnzaEKQ1Pauw5MFMV6HmgEMHITf0Aua9fQuzi2uZzGc,4123
+-urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510
+-urllib3/util/retry.py,sha256=tn168HDMUynFmXRP-uVaLRUOlbTEJikoB1RuZdwfCes,21366
+-urllib3/util/ssl_.py,sha256=cUsmU604z2zAOZcaXDpINXOokQ1RtlJMe96TBDkaJp0,16199
+-urllib3/util/ssltransport.py,sha256=IvGQvs9YWkf4jzfqVjTu_UWjwAUgPn5ActajW8VLz6A,6908
+-urllib3/util/timeout.py,sha256=QSbBUNOB9yh6AnDn61SrLQ0hg5oz0I9-uXEG91AJuIg,10003
+-urllib3/util/url.py,sha256=LWfLSlI4l2FmUMKfCkElCaW10-0N-sJDT9bxaDZJkjs,13964
+-urllib3/util/wait.py,sha256=3MUKRSAUJDB2tgco7qRUskW0zXGAWYvRRE4Q1_6xlLs,5404
+-urllib3-1.26.0.dist-info/LICENSE.txt,sha256=w3vxhuJ8-dvpYZ5V7f486nswCRzrPaY8fay-Dm13kHs,1115
+-urllib3-1.26.0.dist-info/METADATA,sha256=Wghdt6nLf9HfZHhWj8Dpgz4n9vGRqXYhdIwJRPgki6M,42629
+-urllib3-1.26.0.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
+-urllib3-1.26.0.dist-info/top_level.txt,sha256=EMiXL2sKrTcmrMxIHTqdc3ET54pQI2Y072LexFEemvo,8
+-urllib3-1.26.0.dist-info/RECORD,,
+diff --git a/third_party/python/urllib3/urllib3-1.26.0.dist-info/LICENSE.txt b/third_party/python/urllib3/urllib3-1.26.17.dist-info/LICENSE.txt
+rename from third_party/python/urllib3/urllib3-1.26.0.dist-info/LICENSE.txt
+rename to third_party/python/urllib3/urllib3-1.26.17.dist-info/LICENSE.txt
+diff --git a/third_party/python/urllib3/urllib3-1.26.0.dist-info/METADATA b/third_party/python/urllib3/urllib3-1.26.17.dist-info/METADATA
+rename from third_party/python/urllib3/urllib3-1.26.0.dist-info/METADATA
+rename to third_party/python/urllib3/urllib3-1.26.17.dist-info/METADATA
+--- a/third_party/python/urllib3/urllib3-1.26.0.dist-info/METADATA
++++ b/third_party/python/urllib3/urllib3-1.26.17.dist-info/METADATA
+@@ -1,50 +1,55 @@
+ Metadata-Version: 2.1
+ Name: urllib3
+-Version: 1.26.0
++Version: 1.26.17
+ Summary: HTTP library with thread-safe connection pooling, file post, and more.
+ Home-page: https://urllib3.readthedocs.io/
+ Author: Andrey Petrov
+ Author-email: andrey.petrov@shazow.net
+ License: MIT
+ Project-URL: Documentation, https://urllib3.readthedocs.io/
+ Project-URL: Code, https://github.com/urllib3/urllib3
+ Project-URL: Issue tracker, https://github.com/urllib3/urllib3/issues
+ Keywords: urllib httplib threadsafe filepost http https ssl pooling
+-Platform: UNKNOWN
+ Classifier: Environment :: Web Environment
+ Classifier: Intended Audience :: Developers
+ Classifier: License :: OSI Approved :: MIT License
+ Classifier: Operating System :: OS Independent
+ Classifier: Programming Language :: Python
+ Classifier: Programming Language :: Python :: 2
+ Classifier: Programming Language :: Python :: 2.7
+ Classifier: Programming Language :: Python :: 3
+-Classifier: Programming Language :: Python :: 3.5
+ Classifier: Programming Language :: Python :: 3.6
+ Classifier: Programming Language :: Python :: 3.7
+ Classifier: Programming Language :: Python :: 3.8
+ Classifier: Programming Language :: Python :: 3.9
++Classifier: Programming Language :: Python :: 3.10
++Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: Implementation :: CPython
+ Classifier: Programming Language :: Python :: Implementation :: PyPy
+ Classifier: Topic :: Internet :: WWW/HTTP
+ Classifier: Topic :: Software Development :: Libraries
+-Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4
++Requires-Python: >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*
+ Description-Content-Type: text/x-rst
++License-File: LICENSE.txt
+ Provides-Extra: brotli
+-Requires-Dist: brotlipy (>=0.6.0) ; extra == 'brotli'
++Requires-Dist: brotlicffi >=0.8.0 ; ((os_name != "nt" or python_version >= "3") and platform_python_implementation != "CPython") and extra == 'brotli'
++Requires-Dist: brotli ==1.0.9 ; (os_name != "nt" and python_version < "3" and platform_python_implementation == "CPython") and extra == 'brotli'
++Requires-Dist: brotlipy >=0.6.0 ; (os_name == "nt" and python_version < "3") and extra == 'brotli'
++Requires-Dist: brotli >=1.0.9 ; (python_version >= "3" and platform_python_implementation == "CPython") and extra == 'brotli'
+ Provides-Extra: secure
+-Requires-Dist: pyOpenSSL (>=0.14) ; extra == 'secure'
+-Requires-Dist: cryptography (>=1.3.4) ; extra == 'secure'
+-Requires-Dist: idna (>=2.0.0) ; extra == 'secure'
++Requires-Dist: pyOpenSSL >=0.14 ; extra == 'secure'
++Requires-Dist: cryptography >=1.3.4 ; extra == 'secure'
++Requires-Dist: idna >=2.0.0 ; extra == 'secure'
+ Requires-Dist: certifi ; extra == 'secure'
++Requires-Dist: urllib3-secure-extra ; extra == 'secure'
+ Requires-Dist: ipaddress ; (python_version == "2.7") and extra == 'secure'
+ Provides-Extra: socks
+-Requires-Dist: PySocks (!=1.5.7,<2.0,>=1.5.6) ; extra == 'socks'
++Requires-Dist: PySocks !=1.5.7,<2.0,>=1.5.6 ; extra == 'socks'
+ 
+ 
+ urllib3 is a powerful, *user-friendly* HTTP client for Python. Much of the
+ Python ecosystem already uses urllib3 and you should too.
+ urllib3 brings many critical features that are missing from the Python
+ standard libraries:
+ 
+ - Thread safety.
+@@ -73,18 +78,20 @@ Installing
+ ----------
+ 
+ urllib3 can be installed with `pip <https://pip.pypa.io>`_::
+ 
+     $ python -m pip install urllib3
+ 
+ Alternatively, you can grab the latest source code from `GitHub <https://github.com/urllib3/urllib3>`_::
+ 
+-    $ git clone git://github.com/urllib3/urllib3.git
+-    $ python setup.py install
++    $ git clone https://github.com/urllib3/urllib3.git
++    $ cd urllib3
++    $ git checkout 1.26.x
++    $ pip install .
+ 
+ 
+ Documentation
+ -------------
+ 
+ urllib3 has usage and reference documentation at `urllib3.readthedocs.io <https://urllib3.readthedocs.io>`_.
+ 
+ 
+@@ -143,16 +150,162 @@ For Enterprise
+        tools.
+ 
+ .. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-urllib3?utm_source=pypi-urllib3&utm_medium=referral&utm_campaign=readme
+ 
+ 
+ Changes
+ =======
+ 
++1.26.17 (2023-10-02)
++--------------------
++
++* Added the ``Cookie`` header to the list of headers to strip from requests when redirecting to a different host. As before, different headers can be set via ``Retry.remove_headers_on_redirect``.
++
++
++1.26.16 (2023-05-23)
++--------------------
++
++* Fixed thread-safety issue where accessing a ``PoolManager`` with many distinct origins
++  would cause connection pools to be closed while requests are in progress (`#2954 <https://github.com/urllib3/urllib3/pull/2954>`_)
++
++
++1.26.15 (2023-03-10)
++--------------------
++
++* Fix socket timeout value when ``HTTPConnection`` is reused (`#2645 <https://github.com/urllib3/urllib3/issues/2645>`__)
++* Remove "!" character from the unreserved characters in IPv6 Zone ID parsing
++  (`#2899 <https://github.com/urllib3/urllib3/issues/2899>`__)
++* Fix IDNA handling of '\x80' byte (`#2901 <https://github.com/urllib3/urllib3/issues/2901>`__)
++
++1.26.14 (2023-01-11)
++--------------------
++
++* Fixed parsing of port 0 (zero) returning None, instead of 0. (`#2850 <https://github.com/urllib3/urllib3/issues/2850>`__)
++* Removed deprecated getheaders() calls in contrib module.
++
++1.26.13 (2022-11-23)
++--------------------
++
++* Deprecated the ``HTTPResponse.getheaders()`` and ``HTTPResponse.getheader()`` methods.
++* Fixed an issue where parsing a URL with leading zeroes in the port would be rejected
++  even when the port number after removing the zeroes was valid.
++* Fixed a deprecation warning when using cryptography v39.0.0.
++* Removed the ``<4`` in the ``Requires-Python`` packaging metadata field.
++
++
++1.26.12 (2022-08-22)
++--------------------
++
++* Deprecated the `urllib3[secure]` extra and the `urllib3.contrib.pyopenssl` module.
++  Both will be removed in v2.x. See this `GitHub issue <https://github.com/urllib3/urllib3/issues/2680>`_
++  for justification and info on how to migrate.
++
++
++1.26.11 (2022-07-25)
++--------------------
++
++* Fixed an issue where reading more than 2 GiB in a call to ``HTTPResponse.read`` would
++  raise an ``OverflowError`` on Python 3.9 and earlier.
++
++
++1.26.10 (2022-07-07)
++--------------------
++
++* Removed support for Python 3.5
++* Fixed an issue where a ``ProxyError`` recommending configuring the proxy as HTTP
++  instead of HTTPS could appear even when an HTTPS proxy wasn't configured.
++
++
++1.26.9 (2022-03-16)
++-------------------
++
++* Changed ``urllib3[brotli]`` extra to favor installing Brotli libraries that are still
++  receiving updates like ``brotli`` and ``brotlicffi`` instead of ``brotlipy``.
++  This change does not impact behavior of urllib3, only which dependencies are installed.
++* Fixed a socket leaking when ``HTTPSConnection.connect()`` raises an exception.
++* Fixed ``server_hostname`` being forwarded from ``PoolManager`` to ``HTTPConnectionPool``
++  when requesting an HTTP URL. Should only be forwarded when requesting an HTTPS URL.
++
++
++1.26.8 (2022-01-07)
++-------------------
++
++* Added extra message to ``urllib3.exceptions.ProxyError`` when urllib3 detects that
++  a proxy is configured to use HTTPS but the proxy itself appears to only use HTTP.
++* Added a mention of the size of the connection pool when discarding a connection due to the pool being full.
++* Added explicit support for Python 3.11.
++* Deprecated the ``Retry.MAX_BACKOFF`` class property in favor of ``Retry.DEFAULT_MAX_BACKOFF``
++  to better match the rest of the default parameter names. ``Retry.MAX_BACKOFF`` is removed in v2.0.
++* Changed location of the vendored ``ssl.match_hostname`` function from ``urllib3.packages.ssl_match_hostname``
++  to ``urllib3.util.ssl_match_hostname`` to ensure Python 3.10+ compatibility after being repackaged
++  by downstream distributors.
++* Fixed absolute imports, all imports are now relative.
++
++
++1.26.7 (2021-09-22)
++-------------------
++
++* Fixed a bug with HTTPS hostname verification involving IP addresses and lack
++  of SNI. (Issue #2400)
++* Fixed a bug where IPv6 braces weren't stripped during certificate hostname
++  matching. (Issue #2240)
++
++
++1.26.6 (2021-06-25)
++-------------------
++
++* Deprecated the ``urllib3.contrib.ntlmpool`` module. urllib3 is not able to support
++  it properly due to `reasons listed in this issue <https://github.com/urllib3/urllib3/issues/2282>`_.
++  If you are a user of this module please leave a comment.
++* Changed ``HTTPConnection.request_chunked()`` to not erroneously emit multiple
++  ``Transfer-Encoding`` headers in the case that one is already specified.
++* Fixed typo in deprecation message to recommend ``Retry.DEFAULT_ALLOWED_METHODS``.
++
++
++1.26.5 (2021-05-26)
++-------------------
++
++* Fixed deprecation warnings emitted in Python 3.10.
++* Updated vendored ``six`` library to 1.16.0.
++* Improved performance of URL parser when splitting
++  the authority component.
++
++
++1.26.4 (2021-03-15)
++-------------------
++
++* Changed behavior of the default ``SSLContext`` when connecting to HTTPS proxy
++  during HTTPS requests. The default ``SSLContext`` now sets ``check_hostname=True``.
++
++
++1.26.3 (2021-01-26)
++-------------------
++
++* Fixed bytes and string comparison issue with headers (Pull #2141)
++
++* Changed ``ProxySchemeUnknown`` error message to be
++  more actionable if the user supplies a proxy URL without
++  a scheme. (Pull #2107)
++
++
++1.26.2 (2020-11-12)
++-------------------
++
++* Fixed an issue where ``wrap_socket`` and ``CERT_REQUIRED`` wouldn't
++  be imported properly on Python 2.7.8 and earlier (Pull #2052)
++
++
++1.26.1 (2020-11-11)
++-------------------
++
++* Fixed an issue where two ``User-Agent`` headers would be sent if a
++  ``User-Agent`` header key is passed as ``bytes`` (Pull #2047)
++
++
+ 1.26.0 (2020-11-10)
+ -------------------
+ 
+ * **NOTE: urllib3 v2.0 will drop support for Python 2**.
+   `Read more in the v2.0 Roadmap <https://urllib3.readthedocs.io/en/latest/v2-roadmap.html>`_.
+ 
+ * Added support for HTTPS proxies contacting HTTPS servers (Pull #1923, Pull #1806)
+ 
+@@ -1326,10 +1479,8 @@ 0.2 (2008-11-17)
+ * Added unit tests.
+ * Bug fixes.
+ 
+ 
+ 0.1 (2008-11-16)
+ ----------------
+ 
+ * First release.
+-
+-
+diff --git a/third_party/python/urllib3/urllib3-1.26.17.dist-info/RECORD b/third_party/python/urllib3/urllib3-1.26.17.dist-info/RECORD
+new file mode 100644
+--- /dev/null
++++ b/third_party/python/urllib3/urllib3-1.26.17.dist-info/RECORD
+@@ -0,0 +1,44 @@
++urllib3/__init__.py,sha256=iXLcYiJySn0GNbWOOZDDApgBL1JgP44EZ8i1760S8Mc,3333
++urllib3/_collections.py,sha256=Rp1mVyBgc_UlAcp6M3at1skJBXR5J43NawRTvW2g_XY,10811
++urllib3/_version.py,sha256=azoM7M7BUADl2kBhMVR6PPf2GhBDI90me1fcnzTwdcw,64
++urllib3/connection.py,sha256=92k9td_y4PEiTIjNufCUa1NzMB3J3w0LEdyokYgXnW8,20300
++urllib3/connectionpool.py,sha256=ItVDasDnPRPP9R8bNxY7tPBlC724nJ9nlxVgXG_SLbI,39990
++urllib3/exceptions.py,sha256=0Mnno3KHTNfXRfY7638NufOPkUb6mXOm-Lqj-4x2w8A,8217
++urllib3/fields.py,sha256=kvLDCg_JmH1lLjUUEY_FLS8UhY7hBvDPuVETbY8mdrM,8579
++urllib3/filepost.py,sha256=5b_qqgRHVlL7uLtdAYBzBh-GHmU5AfJVt_2N0XS3PeY,2440
++urllib3/poolmanager.py,sha256=0i8cJgrqupza67IBPZ_u9jXvnSxr5UBlVEiUqdkPtYI,19752
++urllib3/request.py,sha256=YTWFNr7QIwh7E1W9dde9LM77v2VWTJ5V78XuTTw7D1A,6691
++urllib3/response.py,sha256=UPgLmnHj4z71ZnH8ivYOyncATifTOw9FQukUqDnckCc,30761
++urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
++urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957
++urllib3/contrib/appengine.py,sha256=6IBW6lPOoVUxASPwtn6IH1AATe5DK3lLJCfwyWlLKAE,11012
++urllib3/contrib/ntlmpool.py,sha256=NlfkW7WMdW8ziqudopjHoW299og1BTWi0IeIibquFwk,4528
++urllib3/contrib/pyopenssl.py,sha256=4AJAlo9NmjWofY4dJwRa4kbZuRuHfNJxu8Pv6yQk1ss,17055
++urllib3/contrib/securetransport.py,sha256=QOhVbWrFQTKbmV-vtyG69amekkKVxXkdjk9oymaO0Ag,34416
++urllib3/contrib/socks.py,sha256=aRi9eWXo9ZEb95XUxef4Z21CFlnnjbEiAo9HOseoMt4,7097
++urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
++urllib3/contrib/_securetransport/bindings.py,sha256=4Xk64qIkPBt09A5q-RIFUuDhNc9mXilVapm7WnYnzRw,17632
++urllib3/contrib/_securetransport/low_level.py,sha256=B2JBB2_NRP02xK6DCa1Pa9IuxrPwxzDzZbixQkb7U9M,13922
++urllib3/packages/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
++urllib3/packages/six.py,sha256=b9LM0wBXv7E7SrbCjAm4wwN-hrH-iNxv18LgWNMMKPo,34665
++urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
++urllib3/packages/backports/makefile.py,sha256=nbzt3i0agPVP07jqqgjhaYjMmuAi_W5E0EywZivVO8E,1417
++urllib3/packages/backports/weakref_finalize.py,sha256=tRCal5OAhNSRyb0DhHp-38AtIlCsRP8BxF3NX-6rqIA,5343
++urllib3/util/__init__.py,sha256=JEmSmmqqLyaw8P51gUImZh8Gwg9i1zSe-DoqAitn2nc,1155
++urllib3/util/connection.py,sha256=5Lx2B1PW29KxBn2T0xkN1CBgRBa3gGVJBKoQoRogEVk,4901
++urllib3/util/proxy.py,sha256=zUvPPCJrp6dOF0N4GAVbOcl6o-4uXKSrGiTkkr5vUS4,1605
++urllib3/util/queue.py,sha256=nRgX8_eX-_VkvxoX096QWoz8Ps0QHUAExILCY_7PncM,498
++urllib3/util/request.py,sha256=fWiAaa8pwdLLIqoTLBxCC2e4ed80muzKU3e3HWWTzFQ,4225
++urllib3/util/response.py,sha256=GJpg3Egi9qaJXRwBh5wv-MNuRWan5BIu40oReoxWP28,3510
++urllib3/util/retry.py,sha256=Z6WEf518eTOXP5jr5QSQ9gqJI0DVYt3Xs3EKnYaTmus,22013
++urllib3/util/ssl_.py,sha256=c0sYiSC6272r6uPkxQpo5rYPP9QC1eR6oI7004gYqZo,17165
++urllib3/util/ssl_match_hostname.py,sha256=Ir4cZVEjmAk8gUAIHWSi7wtOO83UCYABY2xFD1Ql_WA,5758
++urllib3/util/ssltransport.py,sha256=NA-u5rMTrDFDFC8QzRKUEKMG0561hOD4qBTr3Z4pv6E,6895
++urllib3/util/timeout.py,sha256=cwq4dMk87mJHSBktK1miYJ-85G-3T3RmT20v7SFCpno,10168
++urllib3/util/url.py,sha256=kMxL1k0d-aQm_iZDw_zMmnyYyjrIA_DbsMy3cm3V55M,14279
++urllib3/util/wait.py,sha256=fOX0_faozG2P7iVojQoE1mbydweNyTcm-hXEfFrTtLI,5403
++urllib3-1.26.17.dist-info/LICENSE.txt,sha256=w3vxhuJ8-dvpYZ5V7f486nswCRzrPaY8fay-Dm13kHs,1115
++urllib3-1.26.17.dist-info/METADATA,sha256=swEiQKmb2m5Vl4fygmy4aLSzZjxDjD8q2-_XzuhO9pA,48743
++urllib3-1.26.17.dist-info/WHEEL,sha256=iYlv5fX357PQyRT2o6tw1bN-YcKFFHKqB_LwHO5wP-g,110
++urllib3-1.26.17.dist-info/top_level.txt,sha256=EMiXL2sKrTcmrMxIHTqdc3ET54pQI2Y072LexFEemvo,8
++urllib3-1.26.17.dist-info/RECORD,,
+diff --git a/third_party/python/urllib3/urllib3-1.26.0.dist-info/WHEEL b/third_party/python/urllib3/urllib3-1.26.17.dist-info/WHEEL
+rename from third_party/python/urllib3/urllib3-1.26.0.dist-info/WHEEL
+rename to third_party/python/urllib3/urllib3-1.26.17.dist-info/WHEEL
+--- a/third_party/python/urllib3/urllib3-1.26.0.dist-info/WHEEL
++++ b/third_party/python/urllib3/urllib3-1.26.17.dist-info/WHEEL
+@@ -1,6 +1,6 @@
+ Wheel-Version: 1.0
+-Generator: bdist_wheel (0.35.1)
++Generator: bdist_wheel (0.41.2)
+ Root-Is-Purelib: true
+ Tag: py2-none-any
+ Tag: py3-none-any
+ 
+diff --git a/third_party/python/urllib3/urllib3-1.26.0.dist-info/top_level.txt b/third_party/python/urllib3/urllib3-1.26.17.dist-info/top_level.txt
+rename from third_party/python/urllib3/urllib3-1.26.0.dist-info/top_level.txt
+rename to third_party/python/urllib3/urllib3-1.26.17.dist-info/top_level.txt
+diff --git a/third_party/python/urllib3/urllib3/__init__.py b/third_party/python/urllib3/urllib3/__init__.py
+--- a/third_party/python/urllib3/urllib3/__init__.py
++++ b/third_party/python/urllib3/urllib3/__init__.py
+@@ -14,16 +14,33 @@ from .connectionpool import HTTPConnecti
+ from .filepost import encode_multipart_formdata
+ from .poolmanager import PoolManager, ProxyManager, proxy_from_url
+ from .response import HTTPResponse
+ from .util.request import make_headers
+ from .util.retry import Retry
+ from .util.timeout import Timeout
+ from .util.url import get_host
+ 
++# === NOTE TO REPACKAGERS AND VENDORS ===
++# Please delete this block, this logic is only
++# for urllib3 being distributed via PyPI.
++# See: https://github.com/urllib3/urllib3/issues/2680
++try:
++    import urllib3_secure_extra  # type: ignore # noqa: F401
++except ImportError:
++    pass
++else:
++    warnings.warn(
++        "'urllib3[secure]' extra is deprecated and will be removed "
++        "in a future release of urllib3 2.x. Read more in this issue: "
++        "https://github.com/urllib3/urllib3/issues/2680",
++        category=DeprecationWarning,
++        stacklevel=2,
++    )
++
+ __author__ = "Andrey Petrov (andrey.petrov@shazow.net)"
+ __license__ = "MIT"
+ __version__ = __version__
+ 
+ __all__ = (
+     "HTTPConnectionPool",
+     "HTTPSConnectionPool",
+     "PoolManager",
+diff --git a/third_party/python/urllib3/urllib3/_version.py b/third_party/python/urllib3/urllib3/_version.py
+--- a/third_party/python/urllib3/urllib3/_version.py
++++ b/third_party/python/urllib3/urllib3/_version.py
+@@ -1,2 +1,2 @@
+ # This file is protected via CODEOWNERS
+-__version__ = "1.26.0"
++__version__ = "1.26.17"
+diff --git a/third_party/python/urllib3/urllib3/connection.py b/third_party/python/urllib3/urllib3/connection.py
+--- a/third_party/python/urllib3/urllib3/connection.py
++++ b/third_party/python/urllib3/urllib3/connection.py
+@@ -38,40 +38,42 @@ try:  # Python 3:
+     # Not a no-op, we're adding this to the namespace so it can be imported.
+     BrokenPipeError = BrokenPipeError
+ except NameError:  # Python 2:
+ 
+     class BrokenPipeError(Exception):
+         pass
+ 
+ 
++from ._collections import HTTPHeaderDict  # noqa (historical, removed in v2)
+ from ._version import __version__
+ from .exceptions import (
+     ConnectTimeoutError,
+     NewConnectionError,
+     SubjectAltNameWarning,
+     SystemTimeWarning,
+ )
+-from .packages.ssl_match_hostname import CertificateError, match_hostname
+ from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection
+ from .util.ssl_ import (
+     assert_fingerprint,
+     create_urllib3_context,
++    is_ipaddress,
+     resolve_cert_reqs,
+     resolve_ssl_version,
+     ssl_wrap_socket,
+ )
++from .util.ssl_match_hostname import CertificateError, match_hostname
+ 
+ log = logging.getLogger(__name__)
+ 
+ port_by_scheme = {"http": 80, "https": 443}
+ 
+ # When it comes time to update this value as a part of regular maintenance
+ # (ie test_recent_date is failing) update it to ~6 months before the current date.
+-RECENT_DATE = datetime.date(2019, 1, 1)
++RECENT_DATE = datetime.date(2022, 1, 1)
+ 
+ _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
+ 
+ 
+ class HTTPConnection(_HTTPConnection, object):
+     """
+     Based on :class:`http.client.HTTPConnection` but provides an extra constructor
+     backwards-compatibility layer between older and newer Pythons.
+@@ -101,16 +103,20 @@ class HTTPConnection(_HTTPConnection, ob
+ 
+     #: Disable Nagle's algorithm by default.
+     #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
+     default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
+ 
+     #: Whether this connection verifies the host's certificate.
+     is_verified = False
+ 
++    #: Whether this proxy connection (if used) verifies the proxy host's
++    #: certificate.
++    proxy_is_verified = None
++
+     def __init__(self, *args, **kw):
+         if not six.PY2:
+             kw.pop("strict", None)
+ 
+         # Pre-set source_address.
+         self.source_address = kw.get("source_address")
+ 
+         #: The socket options provided by the user. If no options are
+@@ -195,45 +201,50 @@ class HTTPConnection(_HTTPConnection, ob
+             # Mark this connection as not reusable
+             self.auto_open = 0
+ 
+     def connect(self):
+         conn = self._new_conn()
+         self._prepare_conn(conn)
+ 
+     def putrequest(self, method, url, *args, **kwargs):
+-        """"""
++        """ """
+         # Empty docstring because the indentation of CPython's implementation
+         # is broken but we don't want this method in our documentation.
+         match = _CONTAINS_CONTROL_CHAR_RE.search(method)
+         if match:
+             raise ValueError(
+                 "Method cannot contain non-token characters %r (found at least %r)"
+                 % (method, match.group())
+             )
+ 
+         return _HTTPConnection.putrequest(self, method, url, *args, **kwargs)
+ 
+     def putheader(self, header, *values):
+-        """"""
+-        if SKIP_HEADER not in values:
++        """ """
++        if not any(isinstance(v, str) and v == SKIP_HEADER for v in values):
+             _HTTPConnection.putheader(self, header, *values)
+         elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS:
+             raise ValueError(
+                 "urllib3.util.SKIP_HEADER only supports '%s'"
+                 % ("', '".join(map(str.title, sorted(SKIPPABLE_HEADERS))),)
+             )
+ 
+     def request(self, method, url, body=None, headers=None):
++        # Update the inner socket's timeout value to send the request.
++        # This only triggers if the connection is re-used.
++        if getattr(self, "sock", None) is not None:
++            self.sock.settimeout(self.timeout)
++
+         if headers is None:
+             headers = {}
+         else:
+             # Avoid modifying the headers passed into .request()
+             headers = headers.copy()
+-        if "user-agent" not in (k.lower() for k in headers):
++        if "user-agent" not in (six.ensure_str(k.lower()) for k in headers):
+             headers["User-Agent"] = _get_default_user_agent()
+         super(HTTPConnection, self).request(method, url, body=body, headers=headers)
+ 
+     def request_chunked(self, method, url, body=None, headers=None):
+         """
+         Alternative to the common request method, which sends the
+         body with chunked encoding and not as one block
+         """
+@@ -243,17 +254,17 @@ class HTTPConnection(_HTTPConnection, ob
+         skip_host = "host" in header_keys
+         self.putrequest(
+             method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
+         )
+         if "user-agent" not in header_keys:
+             self.putheader("User-Agent", _get_default_user_agent())
+         for header, value in headers.items():
+             self.putheader(header, value)
+-        if "transfer-encoding" not in headers:
++        if "transfer-encoding" not in header_keys:
+             self.putheader("Transfer-Encoding", "chunked")
+         self.endheaders()
+ 
+         if body is not None:
+             stringish_types = six.string_types + (bytes,)
+             if isinstance(body, stringish_types):
+                 body = (body,)
+             for chunk in body:
+@@ -344,27 +355,25 @@ class HTTPSConnection(HTTPConnection):
+         self.assert_hostname = assert_hostname
+         self.assert_fingerprint = assert_fingerprint
+         self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
+         self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
+         self.ca_cert_data = ca_cert_data
+ 
+     def connect(self):
+         # Add certificate verification
+-        conn = self._new_conn()
++        self.sock = conn = self._new_conn()
+         hostname = self.host
+         tls_in_tls = False
+ 
+         if self._is_using_tunnel():
+             if self.tls_in_tls_required:
+-                conn = self._connect_tls_proxy(hostname, conn)
++                self.sock = conn = self._connect_tls_proxy(hostname, conn)
+                 tls_in_tls = True
+ 
+-            self.sock = conn
+-
+             # Calls self._set_hostport(), so self.host is
+             # self._tunnel_host below.
+             self._tunnel()
+             # Mark this connection as not reusable
+             self.auto_open = 0
+ 
+             # Override the host with the one we're requesting data from.
+             hostname = self._tunnel_host
+@@ -487,27 +496,56 @@ class HTTPSConnection(HTTPConnection):
+             self.cert_reqs,
+             self.ca_certs,
+             self.ca_cert_dir,
+             self.ca_cert_data,
+         )
+ 
+         # If no cert was provided, use only the default options for server
+         # certificate validation
+-        return ssl_wrap_socket(
++        socket = ssl_wrap_socket(
+             sock=conn,
+             ca_certs=self.ca_certs,
+             ca_cert_dir=self.ca_cert_dir,
+             ca_cert_data=self.ca_cert_data,
+             server_hostname=hostname,
+             ssl_context=ssl_context,
+         )
+ 
++        if ssl_context.verify_mode != ssl.CERT_NONE and not getattr(
++            ssl_context, "check_hostname", False
++        ):
++            # While urllib3 attempts to always turn off hostname matching from
++            # the TLS library, this cannot always be done. So we check whether
++            # the TLS Library still thinks it's matching hostnames.
++            cert = socket.getpeercert()
++            if not cert.get("subjectAltName", ()):
++                warnings.warn(
++                    (
++                        "Certificate for {0} has no `subjectAltName`, falling back to check for a "
++                        "`commonName` for now. This feature is being removed by major browsers and "
++                        "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 "
++                        "for details.)".format(hostname)
++                    ),
++                    SubjectAltNameWarning,
++                )
++            _match_hostname(cert, hostname)
++
++        self.proxy_is_verified = ssl_context.verify_mode == ssl.CERT_REQUIRED
++        return socket
++
+ 
+ def _match_hostname(cert, asserted_hostname):
++    # Our upstream implementation of ssl.match_hostname()
++    # only applies this normalization to IP addresses so it doesn't
++    # match DNS SANs so we do the same thing!
++    stripped_hostname = asserted_hostname.strip("u[]")
++    if is_ipaddress(stripped_hostname):
++        asserted_hostname = stripped_hostname
++
+     try:
+         match_hostname(cert, asserted_hostname)
+     except CertificateError as e:
+         log.warning(
+             "Certificate did not match expected hostname: %s. Certificate: %s",
+             asserted_hostname,
+             cert,
+         )
+diff --git a/third_party/python/urllib3/urllib3/connectionpool.py b/third_party/python/urllib3/urllib3/connectionpool.py
+--- a/third_party/python/urllib3/urllib3/connectionpool.py
++++ b/third_party/python/urllib3/urllib3/connectionpool.py
+@@ -1,12 +1,13 @@
+ from __future__ import absolute_import
+ 
+ import errno
+ import logging
++import re
+ import socket
+ import sys
+ import warnings
+ from socket import error as SocketError
+ from socket import timeout as SocketTimeout
+ 
+ from .connection import (
+     BaseSSLError,
+@@ -30,30 +31,37 @@ from .exceptions import (
+     ProtocolError,
+     ProxyError,
+     ReadTimeoutError,
+     SSLError,
+     TimeoutError,
+ )
+ from .packages import six
+ from .packages.six.moves import queue
+-from .packages.ssl_match_hostname import CertificateError
+ from .request import RequestMethods
+ from .response import HTTPResponse
+ from .util.connection import is_connection_dropped
+ from .util.proxy import connection_requires_http_tunnel
+ from .util.queue import LifoQueue
+ from .util.request import set_file_position
+ from .util.response import assert_header_parsing
+ from .util.retry import Retry
++from .util.ssl_match_hostname import CertificateError
+ from .util.timeout import Timeout
+ from .util.url import Url, _encode_target
+ from .util.url import _normalize_host as normalize_host
+ from .util.url import get_host, parse_url
+ 
++try:  # Platform-specific: Python 3
++    import weakref
++
++    weakref_finalize = weakref.finalize
++except AttributeError:  # Platform-specific: Python 2
++    from .packages.backports.weakref_finalize import weakref_finalize
++
+ xrange = six.moves.xrange
+ 
+ log = logging.getLogger(__name__)
+ 
+ _Default = object()
+ 
+ 
+ # Pool objects
+@@ -214,16 +222,26 @@ class HTTPConnectionPool(ConnectionPool,
+             # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
+             # We cannot know if the user has added default socket options, so we cannot replace the
+             # list.
+             self.conn_kw.setdefault("socket_options", [])
+ 
+             self.conn_kw["proxy"] = self.proxy
+             self.conn_kw["proxy_config"] = self.proxy_config
+ 
++        # Do not pass 'self' as callback to 'finalize'.
++        # Then the 'finalize' would keep an endless living (leak) to self.
++        # By just passing a reference to the pool allows the garbage collector
++        # to free self if nobody else has a reference to it.
++        pool = self.pool
++
++        # Close all the HTTPConnections in the pool before the
++        # HTTPConnectionPool object is garbage collected.
++        weakref_finalize(self, _close_pool_connections, pool)
++
+     def _new_conn(self):
+         """
+         Return a fresh :class:`HTTPConnection`.
+         """
+         self.num_connections += 1
+         log.debug(
+             "Starting new HTTP connection (%d): %s:%s",
+             self.num_connections,
+@@ -296,34 +314,37 @@ class HTTPConnectionPool(ConnectionPool,
+         try:
+             self.pool.put(conn, block=False)
+             return  # Everything is dandy, done.
+         except AttributeError:
+             # self.pool is None.
+             pass
+         except queue.Full:
+             # This should never happen if self.block == True
+-            log.warning("Connection pool is full, discarding connection: %s", self.host)
+-
++            log.warning(
++                "Connection pool is full, discarding connection: %s. Connection pool size: %s",
++                self.host,
++                self.pool.qsize(),
++            )
+         # Connection never got put back into the pool, close it.
+         if conn:
+             conn.close()
+ 
+     def _validate_conn(self, conn):
+         """
+         Called right before a request is made, after the socket is created.
+         """
+         pass
+ 
+     def _prepare_proxy(self, conn):
+         # Nothing to do for HTTP connections.
+         pass
+ 
+     def _get_timeout(self, timeout):
+-        """ Helper that always returns a :class:`urllib3.util.Timeout` """
++        """Helper that always returns a :class:`urllib3.util.Timeout`"""
+         if timeout is _Default:
+             return self.timeout.clone()
+ 
+         if isinstance(timeout, Timeout):
+             return timeout.clone()
+         else:
+             # User passed us an int/float. This is for backwards compatibility,
+             # can be removed later
+@@ -370,17 +391,17 @@ class HTTPConnectionPool(ConnectionPool,
+             the socket connect and the socket read, or an instance of
+             :class:`urllib3.util.Timeout`, which gives you more fine-grained
+             control over your timeouts.
+         """
+         self.num_requests += 1
+ 
+         timeout_obj = self._get_timeout(timeout)
+         timeout_obj.start_connect()
+-        conn.timeout = timeout_obj.connect_timeout
++        conn.timeout = Timeout.resolve_default_timeout(timeout_obj.connect_timeout)
+ 
+         # Trigger any extra validation we need to do.
+         try:
+             self._validate_conn(conn)
+         except (SocketTimeout, BaseSSLError) as e:
+             # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
+             self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
+             raise
+@@ -480,24 +501,18 @@ class HTTPConnectionPool(ConnectionPool,
+         """
+         Close all pooled connections and disable the pool.
+         """
+         if self.pool is None:
+             return
+         # Disable access to the pool
+         old_pool, self.pool = self.pool, None
+ 
+-        try:
+-            while True:
+-                conn = old_pool.get(block=False)
+-                if conn:
+-                    conn.close()
+-
+-        except queue.Empty:
+-            pass  # Done.
++        # Close all the HTTPConnections in the pool.
++        _close_pool_connections(old_pool)
+ 
+     def is_same_host(self, url):
+         """
+         Check if the given ``url`` is a member of the same host as this
+         connection pool.
+         """
+         if url.startswith("/"):
+             return True
+@@ -740,17 +755,45 @@ class HTTPConnectionPool(ConnectionPool,
+             ProtocolError,
+             BaseSSLError,
+             SSLError,
+             CertificateError,
+         ) as e:
+             # Discard the connection for these exceptions. It will be
+             # replaced during the next _get_conn() call.
+             clean_exit = False
+-            if isinstance(e, (BaseSSLError, CertificateError)):
++
++            def _is_ssl_error_message_from_http_proxy(ssl_error):
++                # We're trying to detect the message 'WRONG_VERSION_NUMBER' but
++                # SSLErrors are kinda all over the place when it comes to the message,
++                # so we try to cover our bases here!
++                message = " ".join(re.split("[^a-z]", str(ssl_error).lower()))
++                return (
++                    "wrong version number" in message or "unknown protocol" in message
++                )
++
++            # Try to detect a common user error with proxies which is to
++            # set an HTTP proxy to be HTTPS when it should be 'http://'
++            # (ie {'http': 'http://proxy', 'https': 'https://proxy'})
++            # Instead we add a nice error message and point to a URL.
++            if (
++                isinstance(e, BaseSSLError)
++                and self.proxy
++                and _is_ssl_error_message_from_http_proxy(e)
++                and conn.proxy
++                and conn.proxy.scheme == "https"
++            ):
++                e = ProxyError(
++                    "Your proxy appears to only use HTTP and not HTTPS, "
++                    "try changing your proxy URL to be HTTP. See: "
++                    "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
++                    "#https-proxy-error-http-proxy",
++                    SSLError(e),
++                )
++            elif isinstance(e, (BaseSSLError, CertificateError)):
+                 e = SSLError(e)
+             elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
+                 e = ProxyError("Cannot connect to proxy.", e)
+             elif isinstance(e, (SocketError, HTTPException)):
+                 e = ProtocolError("Connection aborted.", e)
+ 
+             retries = retries.increment(
+                 method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
+@@ -825,17 +868,17 @@ class HTTPConnectionPool(ConnectionPool,
+                 pool_timeout=pool_timeout,
+                 release_conn=release_conn,
+                 chunked=chunked,
+                 body_pos=body_pos,
+                 **response_kw
+             )
+ 
+         # Check if we should retry the HTTP response.
+-        has_retry_after = bool(response.getheader("Retry-After"))
++        has_retry_after = bool(response.headers.get("Retry-After"))
+         if retries.is_retry(method, response.status, has_retry_after):
+             try:
+                 retries = retries.increment(method, url, response=response, _pool=self)
+             except MaxRetryError:
+                 if retries.raise_on_status:
+                     response.drain_conn()
+                     raise
+                 return response
+@@ -1009,22 +1052,33 @@ class HTTPSConnectionPool(HTTPConnection
+         if not getattr(conn, "sock", None):  # AppEngine might not have  `.sock`
+             conn.connect()
+ 
+         if not conn.is_verified:
+             warnings.warn(
+                 (
+                     "Unverified HTTPS request is being made to host '%s'. "
+                     "Adding certificate verification is strongly advised. See: "
+-                    "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
++                    "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
+                     "#ssl-warnings" % conn.host
+                 ),
+                 InsecureRequestWarning,
+             )
+ 
++        if getattr(conn, "proxy_is_verified", None) is False:
++            warnings.warn(
++                (
++                    "Unverified HTTPS connection done to an HTTPS proxy. "
++                    "Adding certificate verification is strongly advised. See: "
++                    "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
++                    "#ssl-warnings"
++                ),
++                InsecureRequestWarning,
++            )
++
+ 
+ def connection_from_url(url, **kw):
+     """
+     Given a url, return an :class:`.ConnectionPool` instance of its host.
+ 
+     This is a shortcut for not having to parse out the scheme, host, and port
+     of the url before creating an :class:`.ConnectionPool` instance.
+ 
+@@ -1060,8 +1114,19 @@ def _normalize_host(host, scheme):
+     # Specifically, if we include brackets but also pass the port then
+     # httplib crazily doubles up the square brackets on the Host header.
+     # Instead, we need to make sure we never pass ``None`` as the port.
+     # However, for backward compatibility reasons we can't actually
+     # *assert* that.  See http://bugs.python.org/issue28539
+     if host.startswith("[") and host.endswith("]"):
+         host = host[1:-1]
+     return host
++
++
++def _close_pool_connections(pool):
++    """Drains a queue of connections and closes each one."""
++    try:
++        while True:
++            conn = pool.get(block=False)
++            if conn:
++                conn.close()
++    except queue.Empty:
++        pass  # Done.
+diff --git a/third_party/python/urllib3/urllib3/contrib/_securetransport/bindings.py b/third_party/python/urllib3/urllib3/contrib/_securetransport/bindings.py
+--- a/third_party/python/urllib3/urllib3/contrib/_securetransport/bindings.py
++++ b/third_party/python/urllib3/urllib3/contrib/_securetransport/bindings.py
+@@ -43,17 +43,17 @@ from ctypes import (
+     c_long,
+     c_size_t,
+     c_uint32,
+     c_ulong,
+     c_void_p,
+ )
+ from ctypes.util import find_library
+ 
+-from urllib3.packages.six import raise_from
++from ...packages.six import raise_from
+ 
+ if platform.system() != "Darwin":
+     raise ImportError("Only macOS is supported")
+ 
+ version = platform.mac_ver()[0]
+ version_info = tuple(map(int, version.split(".")))
+ if version_info < (10, 8):
+     raise OSError(
+diff --git a/third_party/python/urllib3/urllib3/contrib/_securetransport/low_level.py b/third_party/python/urllib3/urllib3/contrib/_securetransport/low_level.py
+--- a/third_party/python/urllib3/urllib3/contrib/_securetransport/low_level.py
++++ b/third_party/python/urllib3/urllib3/contrib/_securetransport/low_level.py
+@@ -183,16 +183,17 @@ def _cert_array_from_pem(pem_bundle):
+ 
+             CoreFoundation.CFArrayAppendValue(cert_array, cert)
+             CoreFoundation.CFRelease(cert)
+     except Exception:
+         # We need to free the array before the exception bubbles further.
+         # We only want to do that if an error occurs: otherwise, the caller
+         # should free.
+         CoreFoundation.CFRelease(cert_array)
++        raise
+ 
+     return cert_array
+ 
+ 
+ def _is_cert(item):
+     """
+     Returns True if a given CFTypeRef is a certificate.
+     """
+diff --git a/third_party/python/urllib3/urllib3/contrib/appengine.py b/third_party/python/urllib3/urllib3/contrib/appengine.py
+--- a/third_party/python/urllib3/urllib3/contrib/appengine.py
++++ b/third_party/python/urllib3/urllib3/contrib/appengine.py
+@@ -106,17 +106,17 @@ class AppEngineManager(RequestMethods):
+         if not urlfetch:
+             raise AppEnginePlatformError(
+                 "URLFetch is not available in this environment."
+             )
+ 
+         warnings.warn(
+             "urllib3 is using URLFetch on Google App Engine sandbox instead "
+             "of sockets. To use sockets directly instead of URLFetch see "
+-            "https://urllib3.readthedocs.io/en/latest/reference/urllib3.contrib.html.",
++            "https://urllib3.readthedocs.io/en/1.26.x/reference/urllib3.contrib.html.",
+             AppEnginePlatformWarning,
+         )
+ 
+         RequestMethods.__init__(self, headers)
+         self.validate_certificate = validate_certificate
+         self.urlfetch_retries = urlfetch_retries
+ 
+         self.retries = retries or Retry.DEFAULT
+@@ -219,17 +219,17 @@ class AppEngineManager(RequestMethods):
+                     headers,
+                     retries=retries,
+                     redirect=redirect,
+                     timeout=timeout,
+                     **response_kw
+                 )
+ 
+         # Check if we should retry the HTTP response.
+-        has_retry_after = bool(http_response.getheader("Retry-After"))
++        has_retry_after = bool(http_response.headers.get("Retry-After"))
+         if retries.is_retry(method, http_response.status, has_retry_after):
+             retries = retries.increment(method, url, response=http_response, _pool=self)
+             log.debug("Retry: %s", url)
+             retries.sleep(http_response)
+             return self.urlopen(
+                 method,
+                 url,
+                 body=body,
+diff --git a/third_party/python/urllib3/urllib3/contrib/ntlmpool.py b/third_party/python/urllib3/urllib3/contrib/ntlmpool.py
+--- a/third_party/python/urllib3/urllib3/contrib/ntlmpool.py
++++ b/third_party/python/urllib3/urllib3/contrib/ntlmpool.py
+@@ -1,22 +1,31 @@
+ """
+ NTLM authenticating pool, contributed by erikcederstran
+ 
+ Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
+ """
+ from __future__ import absolute_import
+ 
++import warnings
+ from logging import getLogger
+ 
+ from ntlm import ntlm
+ 
+ from .. import HTTPSConnectionPool
+ from ..packages.six.moves.http_client import HTTPSConnection
+ 
++warnings.warn(
++    "The 'urllib3.contrib.ntlmpool' module is deprecated and will be removed "
++    "in urllib3 v2.0 release, urllib3 is not able to support it properly due "
++    "to reasons listed in issue: https://github.com/urllib3/urllib3/issues/2282. "
++    "If you are a user of this module please comment in the mentioned issue.",
++    DeprecationWarning,
++)
++
+ log = getLogger(__name__)
+ 
+ 
+ class NTLMConnectionPool(HTTPSConnectionPool):
+     """
+     Implements an NTLM authentication version of an urllib3 connection pool
+     """
+ 
+@@ -55,17 +64,17 @@ class NTLMConnectionPool(HTTPSConnection
+ 
+         # Send negotiation message
+         headers[req_header] = "NTLM %s" % ntlm.create_NTLM_NEGOTIATE_MESSAGE(
+             self.rawuser
+         )
+         log.debug("Request headers: %s", headers)
+         conn.request("GET", self.authurl, None, headers)
+         res = conn.getresponse()
+-        reshdr = dict(res.getheaders())
++        reshdr = dict(res.headers)
+         log.debug("Response status: %s %s", res.status, res.reason)
+         log.debug("Response headers: %s", reshdr)
+         log.debug("Response data: %s [...]", res.read(100))
+ 
+         # Remove the reference to the socket, so that it can not be closed by
+         # the response object (we want to keep the socket open)
+         res.fp = None
+ 
+@@ -87,17 +96,17 @@ class NTLMConnectionPool(HTTPSConnection
+         auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(
+             ServerChallenge, self.user, self.domain, self.pw, NegotiateFlags
+         )
+         headers[req_header] = "NTLM %s" % auth_msg
+         log.debug("Request headers: %s", headers)
+         conn.request("GET", self.authurl, None, headers)
+         res = conn.getresponse()
+         log.debug("Response status: %s %s", res.status, res.reason)
+-        log.debug("Response headers: %s", dict(res.getheaders()))
++        log.debug("Response headers: %s", dict(res.headers))
+         log.debug("Response data: %s [...]", res.read()[:100])
+         if res.status != 200:
+             if res.status == 401:
+                 raise Exception("Server rejected request: wrong username or password")
+             raise Exception("Wrong server response: %s %s" % (res.status, res.reason))
+ 
+         res.fp = None
+         log.debug("Connection established")
+diff --git a/third_party/python/urllib3/urllib3/contrib/pyopenssl.py b/third_party/python/urllib3/urllib3/contrib/pyopenssl.py
+--- a/third_party/python/urllib3/urllib3/contrib/pyopenssl.py
++++ b/third_party/python/urllib3/urllib3/contrib/pyopenssl.py
+@@ -42,20 +42,20 @@ compression in Python 2 (see `CRIME atta
+ .. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
+ .. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
+ .. _pyopenssl: https://www.pyopenssl.org
+ .. _cryptography: https://cryptography.io
+ .. _idna: https://github.com/kjd/idna
+ """
+ from __future__ import absolute_import
+ 
++import OpenSSL.crypto
+ import OpenSSL.SSL
+ from cryptography import x509
+ from cryptography.hazmat.backends.openssl import backend as openssl_backend
+-from cryptography.hazmat.backends.openssl.x509 import _Certificate
+ 
+ try:
+     from cryptography.x509 import UnsupportedExtension
+ except ImportError:
+     # UnsupportedExtension is gone in cryptography >= 2.1.0
+     class UnsupportedExtension(Exception):
+         pass
+ 
+@@ -68,28 +68,39 @@ try:  # Platform-specific: Python 2
+     from socket import _fileobject
+ except ImportError:  # Platform-specific: Python 3
+     _fileobject = None
+     from ..packages.backports.makefile import backport_makefile
+ 
+ import logging
+ import ssl
+ import sys
++import warnings
+ 
+ from .. import util
+ from ..packages import six
++from ..util.ssl_ import PROTOCOL_TLS_CLIENT
++
++warnings.warn(
++    "'urllib3.contrib.pyopenssl' module is deprecated and will be removed "
++    "in a future release of urllib3 2.x. Read more in this issue: "
++    "https://github.com/urllib3/urllib3/issues/2680",
++    category=DeprecationWarning,
++    stacklevel=2,
++)
+ 
+ __all__ = ["inject_into_urllib3", "extract_from_urllib3"]
+ 
+ # SNI always works.
+ HAS_SNI = True
+ 
+ # Map from urllib3 to PyOpenSSL compatible parameter-values.
+ _openssl_versions = {
+     util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD,
++    PROTOCOL_TLS_CLIENT: OpenSSL.SSL.SSLv23_METHOD,
+     ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
+ }
+ 
+ if hasattr(ssl, "PROTOCOL_SSLv3") and hasattr(OpenSSL.SSL, "SSLv3_METHOD"):
+     _openssl_versions[ssl.PROTOCOL_SSLv3] = OpenSSL.SSL.SSLv3_METHOD
+ 
+ if hasattr(ssl, "PROTOCOL_TLSv1_1") and hasattr(OpenSSL.SSL, "TLSv1_1_METHOD"):
+     _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD
+@@ -212,19 +223,18 @@ def _dnsname_to_stdlib(name):
+ def get_subj_alt_name(peer_cert):
+     """
+     Given an PyOpenSSL certificate, provides all the subject alternative names.
+     """
+     # Pass the cert to cryptography, which has much better APIs for this.
+     if hasattr(peer_cert, "to_cryptography"):
+         cert = peer_cert.to_cryptography()
+     else:
+-        # This is technically using private APIs, but should work across all
+-        # relevant versions before PyOpenSSL got a proper API for this.
+-        cert = _Certificate(openssl_backend, peer_cert._x509)
++        der = OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, peer_cert)
++        cert = x509.load_der_x509_certificate(der, openssl_backend)
+ 
+     # We want to find the SAN extension. Ask Cryptography to locate it (it's
+     # faster than looping in Python)
+     try:
+         ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
+     except x509.ExtensionNotFound:
+         # No such extension, return the empty list.
+         return []
+@@ -399,17 +409,16 @@ class WrappedSocket(object):
+ 
+ 
+ if _fileobject:  # Platform-specific: Python 2
+ 
+     def makefile(self, mode, bufsize=-1):
+         self._makefile_refs += 1
+         return _fileobject(self, mode, bufsize, close=True)
+ 
+-
+ else:  # Platform-specific: Python 3
+     makefile = backport_makefile
+ 
+ WrappedSocket.makefile = makefile
+ 
+ 
+ class PyOpenSSLContext(object):
+     """
+diff --git a/third_party/python/urllib3/urllib3/contrib/securetransport.py b/third_party/python/urllib3/urllib3/contrib/securetransport.py
+--- a/third_party/python/urllib3/urllib3/contrib/securetransport.py
++++ b/third_party/python/urllib3/urllib3/contrib/securetransport.py
+@@ -62,16 +62,17 @@ import socket
+ import ssl
+ import struct
+ import threading
+ import weakref
+ 
+ import six
+ 
+ from .. import util
++from ..util.ssl_ import PROTOCOL_TLS_CLIENT
+ from ._securetransport.bindings import CoreFoundation, Security, SecurityConst
+ from ._securetransport.low_level import (
+     _assert_no_error,
+     _build_tls_unknown_ca_alert,
+     _cert_array_from_pem,
+     _create_cfstring_array,
+     _load_client_cert_chain,
+     _temporary_keychain,
+@@ -149,17 +150,18 @@ CIPHER_SUITES = [
+     SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA,
+     SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA,
+ ]
+ 
+ # Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of
+ # TLSv1 and a high of TLSv1.2. For everything else, we pin to that version.
+ # TLSv1 to 1.2 are supported on macOS 10.8+
+ _protocol_to_min_max = {
+-    util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12)
++    util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
++    PROTOCOL_TLS_CLIENT: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
+ }
+ 
+ if hasattr(ssl, "PROTOCOL_SSLv2"):
+     _protocol_to_min_max[ssl.PROTOCOL_SSLv2] = (
+         SecurityConst.kSSLProtocol2,
+         SecurityConst.kSSLProtocol2,
+     )
+ if hasattr(ssl, "PROTOCOL_SSLv3"):
+@@ -763,17 +765,16 @@ class WrappedSocket(object):
+ 
+ 
+ if _fileobject:  # Platform-specific: Python 2
+ 
+     def makefile(self, mode, bufsize=-1):
+         self._makefile_refs += 1
+         return _fileobject(self, mode, bufsize, close=True)
+ 
+-
+ else:  # Platform-specific: Python 3
+ 
+     def makefile(self, mode="r", buffering=None, *args, **kwargs):
+         # We disable buffering with SecureTransport because it conflicts with
+         # the buffering that ST does internally (see issue #1153 for more).
+         buffering = 0
+         return backport_makefile(self, mode, buffering, *args, **kwargs)
+ 
+diff --git a/third_party/python/urllib3/urllib3/contrib/socks.py b/third_party/python/urllib3/urllib3/contrib/socks.py
+--- a/third_party/python/urllib3/urllib3/contrib/socks.py
++++ b/third_party/python/urllib3/urllib3/contrib/socks.py
+@@ -46,17 +46,17 @@ except ImportError:
+     import warnings
+ 
+     from ..exceptions import DependencyWarning
+ 
+     warnings.warn(
+         (
+             "SOCKS support in urllib3 requires the installation of optional "
+             "dependencies: specifically, PySocks.  For more information, see "
+-            "https://urllib3.readthedocs.io/en/latest/contrib.html#socks-proxies"
++            "https://urllib3.readthedocs.io/en/1.26.x/contrib.html#socks-proxies"
+         ),
+         DependencyWarning,
+     )
+     raise
+ 
+ from socket import error as SocketError
+ from socket import timeout as SocketTimeout
+ 
+diff --git a/third_party/python/urllib3/urllib3/exceptions.py b/third_party/python/urllib3/urllib3/exceptions.py
+--- a/third_party/python/urllib3/urllib3/exceptions.py
++++ b/third_party/python/urllib3/urllib3/exceptions.py
+@@ -284,17 +284,27 @@ class InvalidHeader(HTTPError):
+ 
+ 
+ class ProxySchemeUnknown(AssertionError, URLSchemeUnknown):
+     """ProxyManager does not support the supplied scheme"""
+ 
+     # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
+ 
+     def __init__(self, scheme):
+-        message = "Not supported proxy scheme %s" % scheme
++        # 'localhost' is here because our URL parser parses
++        # localhost:8080 -> scheme=localhost, remove if we fix this.
++        if scheme == "localhost":
++            scheme = None
++        if scheme is None:
++            message = "Proxy URL had no scheme, should start with http:// or https://"
++        else:
++            message = (
++                "Proxy URL had unsupported scheme %s, should use http:// or https://"
++                % scheme
++            )
+         super(ProxySchemeUnknown, self).__init__(message)
+ 
+ 
+ class ProxySchemeUnsupported(ValueError):
+     """Fetching HTTPS resources through HTTPS proxies is unsupported"""
+ 
+     pass
+ 
+diff --git a/third_party/python/urllib3/urllib3/packages/__init__.py b/third_party/python/urllib3/urllib3/packages/__init__.py
+--- a/third_party/python/urllib3/urllib3/packages/__init__.py
++++ b/third_party/python/urllib3/urllib3/packages/__init__.py
+@@ -1,5 +0,0 @@
+-from __future__ import absolute_import
+-
+-from . import ssl_match_hostname
+-
+-__all__ = ("ssl_match_hostname",)
+diff --git a/third_party/python/urllib3/urllib3/packages/backports/weakref_finalize.py b/third_party/python/urllib3/urllib3/packages/backports/weakref_finalize.py
+new file mode 100644
+--- /dev/null
++++ b/third_party/python/urllib3/urllib3/packages/backports/weakref_finalize.py
+@@ -0,0 +1,155 @@
++# -*- coding: utf-8 -*-
++"""
++backports.weakref_finalize
++~~~~~~~~~~~~~~~~~~
++
++Backports the Python 3 ``weakref.finalize`` method.
++"""
++from __future__ import absolute_import
++
++import itertools
++import sys
++from weakref import ref
++
++__all__ = ["weakref_finalize"]
++
++
++class weakref_finalize(object):
++    """Class for finalization of weakrefable objects
++    finalize(obj, func, *args, **kwargs) returns a callable finalizer
++    object which will be called when obj is garbage collected. The
++    first time the finalizer is called it evaluates func(*arg, **kwargs)
++    and returns the result. After this the finalizer is dead, and
++    calling it just returns None.
++    When the program exits any remaining finalizers for which the
++    atexit attribute is true will be run in reverse order of creation.
++    By default atexit is true.
++    """
++
++    # Finalizer objects don't have any state of their own.  They are
++    # just used as keys to lookup _Info objects in the registry.  This
++    # ensures that they cannot be part of a ref-cycle.
++
++    __slots__ = ()
++    _registry = {}
++    _shutdown = False
++    _index_iter = itertools.count()
++    _dirty = False
++    _registered_with_atexit = False
++
++    class _Info(object):
++        __slots__ = ("weakref", "func", "args", "kwargs", "atexit", "index")
++
++    def __init__(self, obj, func, *args, **kwargs):
++        if not self._registered_with_atexit:
++            # We may register the exit function more than once because
++            # of a thread race, but that is harmless
++            import atexit
++
++            atexit.register(self._exitfunc)
++            weakref_finalize._registered_with_atexit = True
++        info = self._Info()
++        info.weakref = ref(obj, self)
++        info.func = func
++        info.args = args
++        info.kwargs = kwargs or None
++        info.atexit = True
++        info.index = next(self._index_iter)
++        self._registry[self] = info
++        weakref_finalize._dirty = True
++
++    def __call__(self, _=None):
++        """If alive then mark as dead and return func(*args, **kwargs);
++        otherwise return None"""
++        info = self._registry.pop(self, None)
++        if info and not self._shutdown:
++            return info.func(*info.args, **(info.kwargs or {}))
++
++    def detach(self):
++        """If alive then mark as dead and return (obj, func, args, kwargs);
++        otherwise return None"""
++        info = self._registry.get(self)
++        obj = info and info.weakref()
++        if obj is not None and self._registry.pop(self, None):
++            return (obj, info.func, info.args, info.kwargs or {})
++
++    def peek(self):
++        """If alive then return (obj, func, args, kwargs);
++        otherwise return None"""
++        info = self._registry.get(self)
++        obj = info and info.weakref()
++        if obj is not None:
++            return (obj, info.func, info.args, info.kwargs or {})
++
++    @property
++    def alive(self):
++        """Whether finalizer is alive"""
++        return self in self._registry
++
++    @property
++    def atexit(self):
++        """Whether finalizer should be called at exit"""
++        info = self._registry.get(self)
++        return bool(info) and info.atexit
++
++    @atexit.setter
++    def atexit(self, value):
++        info = self._registry.get(self)
++        if info:
++            info.atexit = bool(value)
++
++    def __repr__(self):
++        info = self._registry.get(self)
++        obj = info and info.weakref()
++        if obj is None:
++            return "<%s object at %#x; dead>" % (type(self).__name__, id(self))
++        else:
++            return "<%s object at %#x; for %r at %#x>" % (
++                type(self).__name__,
++                id(self),
++                type(obj).__name__,
++                id(obj),
++            )
++
++    @classmethod
++    def _select_for_exit(cls):
++        # Return live finalizers marked for exit, oldest first
++        L = [(f, i) for (f, i) in cls._registry.items() if i.atexit]
++        L.sort(key=lambda item: item[1].index)
++        return [f for (f, i) in L]
++
++    @classmethod
++    def _exitfunc(cls):
++        # At shutdown invoke finalizers for which atexit is true.
++        # This is called once all other non-daemonic threads have been
++        # joined.
++        reenable_gc = False
++        try:
++            if cls._registry:
++                import gc
++
++                if gc.isenabled():
++                    reenable_gc = True
++                    gc.disable()
++                pending = None
++                while True:
++                    if pending is None or weakref_finalize._dirty:
++                        pending = cls._select_for_exit()
++                        weakref_finalize._dirty = False
++                    if not pending:
++                        break
++                    f = pending.pop()
++                    try:
++                        # gc is disabled, so (assuming no daemonic
++                        # threads) the following is the only line in
++                        # this function which might trigger creation
++                        # of a new finalizer
++                        f()
++                    except Exception:
++                        sys.excepthook(*sys.exc_info())
++                    assert f not in cls._registry
++        finally:
++            # prevent any more finalizers from executing during shutdown
++            weakref_finalize._shutdown = True
++            if reenable_gc:
++                gc.enable()
+diff --git a/third_party/python/urllib3/urllib3/packages/six.py b/third_party/python/urllib3/urllib3/packages/six.py
+--- a/third_party/python/urllib3/urllib3/packages/six.py
++++ b/third_party/python/urllib3/urllib3/packages/six.py
+@@ -1,9 +1,9 @@
+-# Copyright (c) 2010-2019 Benjamin Peterson
++# Copyright (c) 2010-2020 Benjamin Peterson
+ #
+ # Permission is hereby granted, free of charge, to any person obtaining a copy
+ # of this software and associated documentation files (the "Software"), to deal
+ # in the Software without restriction, including without limitation the rights
+ # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ # copies of the Software, and to permit persons to whom the Software is
+ # furnished to do so, subject to the following conditions:
+ #
+@@ -24,17 +24,17 @@ from __future__ import absolute_import
+ 
+ import functools
+ import itertools
+ import operator
+ import sys
+ import types
+ 
+ __author__ = "Benjamin Peterson <benjamin@python.org>"
+-__version__ = "1.12.0"
++__version__ = "1.16.0"
+ 
+ 
+ # Useful for very coarse version differentiation.
+ PY2 = sys.version_info[0] == 2
+ PY3 = sys.version_info[0] == 3
+ PY34 = sys.version_info[0:2] >= (3, 4)
+ 
+ if PY3:
+@@ -66,16 +66,21 @@ else:
+         except OverflowError:
+             # 32-bit
+             MAXSIZE = int((1 << 31) - 1)
+         else:
+             # 64-bit
+             MAXSIZE = int((1 << 63) - 1)
+         del X
+ 
++if PY34:
++    from importlib.util import spec_from_loader
++else:
++    spec_from_loader = None
++
+ 
+ def _add_doc(func, doc):
+     """Add documentation to a function."""
+     func.__doc__ = doc
+ 
+ 
+ def _import_module(name):
+     """Import module, returning the module after the last dot."""
+@@ -177,16 +182,21 @@ class _SixMetaPathImporter(object):
+     def _get_module(self, fullname):
+         return self.known_modules[self.name + "." + fullname]
+ 
+     def find_module(self, fullname, path=None):
+         if fullname in self.known_modules:
+             return self
+         return None
+ 
++    def find_spec(self, fullname, path, target=None):
++        if fullname in self.known_modules:
++            return spec_from_loader(fullname, self)
++        return None
++
+     def __get_module(self, fullname):
+         try:
+             return self.known_modules[fullname]
+         except KeyError:
+             raise ImportError("This loader does not know module " + fullname)
+ 
+     def load_module(self, fullname):
+         try:
+@@ -215,16 +225,22 @@ class _SixMetaPathImporter(object):
+         """Return None
+ 
+         Required, if is_package is implemented"""
+         self.__get_module(fullname)  # eventually raises ImportError
+         return None
+ 
+     get_source = get_code  # same as get_code
+ 
++    def create_module(self, spec):
++        return self.load_module(spec.name)
++
++    def exec_module(self, module):
++        pass
++
+ 
+ _importer = _SixMetaPathImporter(__name__)
+ 
+ 
+ class _MovedItems(_LazyModule):
+ 
+     """Lazy loading of moved objects"""
+ 
+@@ -255,19 +271,29 @@ class _MovedItems(_LazyModule):
+     MovedAttribute("UserString", "UserString", "collections"),
+     MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+     MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+     MovedAttribute(
+         "zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"
+     ),
+     MovedModule("builtins", "__builtin__"),
+     MovedModule("configparser", "ConfigParser"),
++    MovedModule(
++        "collections_abc",
++        "collections",
++        "collections.abc" if sys.version_info >= (3, 3) else "collections",
++    ),
+     MovedModule("copyreg", "copy_reg"),
+     MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
+-    MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
++    MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"),
++    MovedModule(
++        "_dummy_thread",
++        "dummy_thread",
++        "_dummy_thread" if sys.version_info < (3, 9) else "_thread",
++    ),
+     MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+     MovedModule("http_cookies", "Cookie", "http.cookies"),
+     MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+     MovedModule("html_parser", "HTMLParser", "html.parser"),
+     MovedModule("http_client", "httplib", "http.client"),
+     MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+     MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
+     MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+@@ -302,17 +328,19 @@ class _MovedItems(_LazyModule):
+     MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+     MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
+     MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+     MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+     MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
+ ]
+ # Add windows specific modules.
+ if sys.platform == "win32":
+-    _moved_attributes += [MovedModule("winreg", "_winreg")]
++    _moved_attributes += [
++        MovedModule("winreg", "_winreg"),
++    ]
+ 
+ for attr in _moved_attributes:
+     setattr(_MovedItems, attr.name, attr)
+     if isinstance(attr, MovedModule):
+         _importer._add_module(attr, "moves." + attr.name)
+ del attr
+ 
+ _MovedItems._moved_attributes = _moved_attributes
+@@ -471,17 +499,17 @@ Module_six_moves_urllib_response._moved_
+ 
+ 
+ class Module_six_moves_urllib_robotparser(_LazyModule):
+ 
+     """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+ 
+ 
+ _urllib_robotparser_moved_attributes = [
+-    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser")
++    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
+ ]
+ for attr in _urllib_robotparser_moved_attributes:
+     setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+ del attr
+ 
+ Module_six_moves_urllib_robotparser._moved_attributes = (
+     _urllib_robotparser_moved_attributes
+ )
+@@ -673,19 +701,21 @@ if PY3:
+ 
+     StringIO = io.StringIO
+     BytesIO = io.BytesIO
+     del io
+     _assertCountEqual = "assertCountEqual"
+     if sys.version_info[1] <= 1:
+         _assertRaisesRegex = "assertRaisesRegexp"
+         _assertRegex = "assertRegexpMatches"
++        _assertNotRegex = "assertNotRegexpMatches"
+     else:
+         _assertRaisesRegex = "assertRaisesRegex"
+         _assertRegex = "assertRegex"
++        _assertNotRegex = "assertNotRegex"
+ else:
+ 
+     def b(s):
+         return s
+ 
+     # Workaround for standalone backslash
+ 
+     def u(s):
+@@ -702,83 +732,76 @@ else:
+ 
+     iterbytes = functools.partial(itertools.imap, ord)
+     import StringIO
+ 
+     StringIO = BytesIO = StringIO.StringIO
+     _assertCountEqual = "assertItemsEqual"
+     _assertRaisesRegex = "assertRaisesRegexp"
+     _assertRegex = "assertRegexpMatches"
++    _assertNotRegex = "assertNotRegexpMatches"
+ _add_doc(b, """Byte literal""")
+ _add_doc(u, """Text literal""")
+ 
+ 
+ def assertCountEqual(self, *args, **kwargs):
+     return getattr(self, _assertCountEqual)(*args, **kwargs)
+ 
+ 
+ def assertRaisesRegex(self, *args, **kwargs):
+     return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+ 
+ 
+ def assertRegex(self, *args, **kwargs):
+     return getattr(self, _assertRegex)(*args, **kwargs)
+ 
+ 
++def assertNotRegex(self, *args, **kwargs):
++    return getattr(self, _assertNotRegex)(*args, **kwargs)
++
++
+ if PY3:
+     exec_ = getattr(moves.builtins, "exec")
+ 
+     def reraise(tp, value, tb=None):
+         try:
+             if value is None:
+                 value = tp()
+             if value.__traceback__ is not tb:
+                 raise value.with_traceback(tb)
+             raise value
+         finally:
+             value = None
+             tb = None
+ 
+-
+ else:
+ 
+     def exec_(_code_, _globs_=None, _locs_=None):
+         """Execute code in a namespace."""
+         if _globs_ is None:
+             frame = sys._getframe(1)
+             _globs_ = frame.f_globals
+             if _locs_ is None:
+                 _locs_ = frame.f_locals
+             del frame
+         elif _locs_ is None:
+             _locs_ = _globs_
+-        exec("""exec _code_ in _globs_, _locs_""")
++        exec ("""exec _code_ in _globs_, _locs_""")
+ 
+     exec_(
+         """def reraise(tp, value, tb=None):
+     try:
+         raise tp, value, tb
+     finally:
+         tb = None
+ """
+     )
+ 
+ 
+-if sys.version_info[:2] == (3, 2):
+-    exec_(
+-        """def raise_from(value, from_value):
+-    try:
+-        if from_value is None:
+-            raise value
+-        raise value from from_value
+-    finally:
+-        value = None
+-"""
+-    )
+-elif sys.version_info[:2] > (3, 2):
++if sys.version_info[:2] > (3,):
+     exec_(
+         """def raise_from(value, from_value):
+     try:
+         raise value from from_value
+     finally:
+         value = None
+ """
+     )
+@@ -858,42 +881,72 @@ if sys.version_info[:2] < (3, 3):
+         _print(*args, **kwargs)
+         if flush and fp is not None:
+             fp.flush()
+ 
+ 
+ _add_doc(reraise, """Reraise an exception.""")
+ 
+ if sys.version_info[0:2] < (3, 4):
++    # This does exactly the same what the :func:`py3:functools.update_wrapper`
++    # function does on Python versions after 3.2. It sets the ``__wrapped__``
++    # attribute on ``wrapper`` object and it doesn't raise an error if any of
++    # the attributes mentioned in ``assigned`` and ``updated`` are missing on
++    # ``wrapped`` object.
++    def _update_wrapper(
++        wrapper,
++        wrapped,
++        assigned=functools.WRAPPER_ASSIGNMENTS,
++        updated=functools.WRAPPER_UPDATES,
++    ):
++        for attr in assigned:
++            try:
++                value = getattr(wrapped, attr)
++            except AttributeError:
++                continue
++            else:
++                setattr(wrapper, attr, value)
++        for attr in updated:
++            getattr(wrapper, attr).update(getattr(wrapped, attr, {}))
++        wrapper.__wrapped__ = wrapped
++        return wrapper
++
++    _update_wrapper.__doc__ = functools.update_wrapper.__doc__
+ 
+     def wraps(
+         wrapped,
+         assigned=functools.WRAPPER_ASSIGNMENTS,
+         updated=functools.WRAPPER_UPDATES,
+     ):
+-        def wrapper(f):
+-            f = functools.wraps(wrapped, assigned, updated)(f)
+-            f.__wrapped__ = wrapped
+-            return f
++        return functools.partial(
++            _update_wrapper, wrapped=wrapped, assigned=assigned, updated=updated
++        )
+ 
+-        return wrapper
+-
++    wraps.__doc__ = functools.wraps.__doc__
+ 
+ else:
+     wraps = functools.wraps
+ 
+ 
+ def with_metaclass(meta, *bases):
+     """Create a base class with a metaclass."""
+     # This requires a bit of explanation: the basic idea is to make a dummy
+     # metaclass for one level of class instantiation that replaces itself with
+     # the actual metaclass.
+     class metaclass(type):
+         def __new__(cls, name, this_bases, d):
+-            return meta(name, bases, d)
++            if sys.version_info[:2] >= (3, 7):
++                # This version introduced PEP 560 that requires a bit
++                # of extra care (we mimic what is done by __build_class__).
++                resolved_bases = types.resolve_bases(bases)
++                if resolved_bases is not bases:
++                    d["__orig_bases__"] = bases
++            else:
++                resolved_bases = bases
++            return meta(name, resolved_bases, d)
+ 
+         @classmethod
+         def __prepare__(cls, name, this_bases):
+             return meta.__prepare__(name, bases)
+ 
+     return type.__new__(metaclass, "temporary_class", (), {})
+ 
+ 
+@@ -923,41 +976,43 @@ def ensure_binary(s, encoding="utf-8", e
+     For Python 2:
+       - `unicode` -> encoded to `str`
+       - `str` -> `str`
+ 
+     For Python 3:
+       - `str` -> encoded to `bytes`
+       - `bytes` -> `bytes`
+     """
++    if isinstance(s, binary_type):
++        return s
+     if isinstance(s, text_type):
+         return s.encode(encoding, errors)
+-    elif isinstance(s, binary_type):
+-        return s
+-    else:
+-        raise TypeError("not expecting type '%s'" % type(s))
++    raise TypeError("not expecting type '%s'" % type(s))
+ 
+ 
+ def ensure_str(s, encoding="utf-8", errors="strict"):
+     """Coerce *s* to `str`.
+ 
+     For Python 2:
+       - `unicode` -> encoded to `str`
+       - `str` -> `str`
+ 
+     For Python 3:
+       - `str` -> `str`
+       - `bytes` -> decoded to `str`
+     """
+-    if not isinstance(s, (text_type, binary_type)):
+-        raise TypeError("not expecting type '%s'" % type(s))
++    # Optimization: Fast return for the common case.
++    if type(s) is str:
++        return s
+     if PY2 and isinstance(s, text_type):
+-        s = s.encode(encoding, errors)
++        return s.encode(encoding, errors)
+     elif PY3 and isinstance(s, binary_type):
+-        s = s.decode(encoding, errors)
++        return s.decode(encoding, errors)
++    elif not isinstance(s, (text_type, binary_type)):
++        raise TypeError("not expecting type '%s'" % type(s))
+     return s
+ 
+ 
+ def ensure_text(s, encoding="utf-8", errors="strict"):
+     """Coerce *s* to six.text_type.
+ 
+     For Python 2:
+       - `unicode` -> `unicode`
+@@ -972,17 +1027,17 @@ def ensure_text(s, encoding="utf-8", err
+     elif isinstance(s, text_type):
+         return s
+     else:
+         raise TypeError("not expecting type '%s'" % type(s))
+ 
+ 
+ def python_2_unicode_compatible(klass):
+     """
+-    A decorator that defines __unicode__ and __str__ methods under Python 2.
++    A class decorator that defines __unicode__ and __str__ methods under Python 2.
+     Under Python 3 it does nothing.
+ 
+     To support Python 2 and 3 with a single code base, define a __str__ method
+     returning text and apply this decorator to the class.
+     """
+     if PY2:
+         if "__str__" not in klass.__dict__:
+             raise ValueError(
+diff --git a/third_party/python/urllib3/urllib3/packages/ssl_match_hostname/__init__.py b/third_party/python/urllib3/urllib3/packages/ssl_match_hostname/__init__.py
+deleted file mode 100644
+--- a/third_party/python/urllib3/urllib3/packages/ssl_match_hostname/__init__.py
++++ /dev/null
+@@ -1,22 +0,0 @@
+-import sys
+-
+-try:
+-    # Our match_hostname function is the same as 3.5's, so we only want to
+-    # import the match_hostname function if it's at least that good.
+-    if sys.version_info < (3, 5):
+-        raise ImportError("Fallback to vendored code")
+-
+-    from ssl import CertificateError, match_hostname
+-except ImportError:
+-    try:
+-        # Backport of the function from a pypi module
+-        from backports.ssl_match_hostname import (  # type: ignore
+-            CertificateError,
+-            match_hostname,
+-        )
+-    except ImportError:
+-        # Our vendored copy
+-        from ._implementation import CertificateError, match_hostname  # type: ignore
+-
+-# Not needed, but documenting what we provide.
+-__all__ = ("CertificateError", "match_hostname")
+diff --git a/third_party/python/urllib3/urllib3/poolmanager.py b/third_party/python/urllib3/urllib3/poolmanager.py
+--- a/third_party/python/urllib3/urllib3/poolmanager.py
++++ b/third_party/python/urllib3/urllib3/poolmanager.py
+@@ -29,16 +29,17 @@ SSL_KEYWORDS = (
+     "key_file",
+     "cert_file",
+     "cert_reqs",
+     "ca_certs",
+     "ssl_version",
+     "ca_cert_dir",
+     "ssl_context",
+     "key_password",
++    "server_hostname",
+ )
+ 
+ # All known keyword arguments that could be provided to the pool manager, its
+ # pools, or the underlying connections. This is used to construct a pool key.
+ _key_fields = (
+     "key_scheme",  # str
+     "key_host",  # str
+     "key_port",  # int
+@@ -165,17 +166,17 @@ class PoolManager(RequestMethods):
+     """
+ 
+     proxy = None
+     proxy_config = None
+ 
+     def __init__(self, num_pools=10, headers=None, **connection_pool_kw):
+         RequestMethods.__init__(self, headers)
+         self.connection_pool_kw = connection_pool_kw
+-        self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close())
++        self.pools = RecentlyUsedContainer(num_pools)
+ 
+         # Locally set the pool classes and keys so other PoolManagers can
+         # override them.
+         self.pool_classes_by_scheme = pool_classes_by_scheme
+         self.key_fn_by_scheme = key_fn_by_scheme.copy()
+ 
+     def __enter__(self):
+         return self
+diff --git a/third_party/python/urllib3/urllib3/request.py b/third_party/python/urllib3/urllib3/request.py
+--- a/third_party/python/urllib3/urllib3/request.py
++++ b/third_party/python/urllib3/urllib3/request.py
+@@ -1,11 +1,14 @@
+ from __future__ import absolute_import
+ 
++import sys
++
+ from .filepost import encode_multipart_formdata
++from .packages import six
+ from .packages.six.moves.urllib.parse import urlencode
+ 
+ __all__ = ["RequestMethods"]
+ 
+ 
+ class RequestMethods(object):
+     """
+     Convenience mixin for classes who implement a :meth:`urlopen` method, such
+@@ -163,8 +166,26 @@ class RequestMethods(object):
+ 
+             extra_kw["body"] = body
+             extra_kw["headers"] = {"Content-Type": content_type}
+ 
+         extra_kw["headers"].update(headers)
+         extra_kw.update(urlopen_kw)
+ 
+         return self.urlopen(method, url, **extra_kw)
++
++
++if not six.PY2:
++
++    class RequestModule(sys.modules[__name__].__class__):
++        def __call__(self, *args, **kwargs):
++            """
++            If user tries to call this module directly urllib3 v2.x style raise an error to the user
++            suggesting they may need urllib3 v2
++            """
++            raise TypeError(
++                "'module' object is not callable\n"
++                "urllib3.request() method is not supported in this release, "
++                "upgrade to urllib3 v2 to use it\n"
++                "see https://urllib3.readthedocs.io/en/stable/v2-migration-guide.html"
++            )
++
++    sys.modules[__name__].__class__ = RequestModule
+diff --git a/third_party/python/urllib3/urllib3/response.py b/third_party/python/urllib3/urllib3/response.py
+--- a/third_party/python/urllib3/urllib3/response.py
++++ b/third_party/python/urllib3/urllib3/response.py
+@@ -1,22 +1,28 @@
+ from __future__ import absolute_import
+ 
+ import io
+ import logging
++import sys
++import warnings
+ import zlib
+ from contextlib import contextmanager
+ from socket import error as SocketError
+ from socket import timeout as SocketTimeout
+ 
+ try:
+-    import brotli
++    try:
++        import brotlicffi as brotli
++    except ImportError:
++        import brotli
+ except ImportError:
+     brotli = None
+ 
++from . import util
+ from ._collections import HTTPHeaderDict
+ from .connection import BaseSSLError, HTTPException
+ from .exceptions import (
+     BodyNotHttplibCompatible,
+     DecodeError,
+     HTTPError,
+     IncompleteRead,
+     InvalidChunkLength,
+@@ -473,16 +479,64 @@ class HTTPResponse(io.IOBase):
+                 if self._connection:
+                     self._connection.close()
+ 
+             # If we hold the original response but it's closed now, we should
+             # return the connection back to the pool.
+             if self._original_response and self._original_response.isclosed():
+                 self.release_conn()
+ 
++    def _fp_read(self, amt):
++        """
++        Read a response with the thought that reading the number of bytes
++        larger than can fit in a 32-bit int at a time via SSL in some
++        known cases leads to an overflow error that has to be prevented
++        if `amt` or `self.length_remaining` indicate that a problem may
++        happen.
++
++        The known cases:
++          * 3.8 <= CPython < 3.9.7 because of a bug
++            https://github.com/urllib3/urllib3/issues/2513#issuecomment-1152559900.
++          * urllib3 injected with pyOpenSSL-backed SSL-support.
++          * CPython < 3.10 only when `amt` does not fit 32-bit int.
++        """
++        assert self._fp
++        c_int_max = 2 ** 31 - 1
++        if (
++            (
++                (amt and amt > c_int_max)
++                or (self.length_remaining and self.length_remaining > c_int_max)
++            )
++            and not util.IS_SECURETRANSPORT
++            and (util.IS_PYOPENSSL or sys.version_info < (3, 10))
++        ):
++            buffer = io.BytesIO()
++            # Besides `max_chunk_amt` being a maximum chunk size, it
++            # affects memory overhead of reading a response by this
++            # method in CPython.
++            # `c_int_max` equal to 2 GiB - 1 byte is the actual maximum
++            # chunk size that does not lead to an overflow error, but
++            # 256 MiB is a compromise.
++            max_chunk_amt = 2 ** 28
++            while amt is None or amt != 0:
++                if amt is not None:
++                    chunk_amt = min(amt, max_chunk_amt)
++                    amt -= chunk_amt
++                else:
++                    chunk_amt = max_chunk_amt
++                data = self._fp.read(chunk_amt)
++                if not data:
++                    break
++                buffer.write(data)
++                del data  # to reduce peak memory usage by `max_chunk_amt`.
++            return buffer.getvalue()
++        else:
++            # StringIO doesn't like amt=None
++            return self._fp.read(amt) if amt is not None else self._fp.read()
++
+     def read(self, amt=None, decode_content=None, cache_content=False):
+         """
+         Similar to :meth:`http.client.HTTPResponse.read`, but with two additional
+         parameters: ``decode_content`` and ``cache_content``.
+ 
+         :param amt:
+             How much of the content to read. If specified, caching is skipped
+             because it doesn't make sense to cache partial content as the full
+@@ -505,23 +559,21 @@ class HTTPResponse(io.IOBase):
+ 
+         if self._fp is None:
+             return
+ 
+         flush_decoder = False
+         fp_closed = getattr(self._fp, "closed", False)
+ 
+         with self._error_catcher():
++            data = self._fp_read(amt) if not fp_closed else b""
+             if amt is None:
+-                # cStringIO doesn't like amt=None
+-                data = self._fp.read() if not fp_closed else b""
+                 flush_decoder = True
+             else:
+                 cache_content = False
+-                data = self._fp.read(amt) if not fp_closed else b""
+                 if (
+                     amt != 0 and not data
+                 ):  # Platform-specific: Buggy versions of Python.
+                     # Close the connection when no data is returned
+                     #
+                     # This is redundant to what httplib/http.client _should_
+                     # already do.  However, versions of python released before
+                     # December 15, 2012 (http://bugs.python.org/issue16298) do
+@@ -607,19 +659,31 @@ class HTTPResponse(io.IOBase):
+             strict=strict,
+             original_response=r,
+             **response_kw
+         )
+         return resp
+ 
+     # Backwards-compatibility methods for http.client.HTTPResponse
+     def getheaders(self):
++        warnings.warn(
++            "HTTPResponse.getheaders() is deprecated and will be removed "
++            "in urllib3 v2.1.0. Instead access HTTPResponse.headers directly.",
++            category=DeprecationWarning,
++            stacklevel=2,
++        )
+         return self.headers
+ 
+     def getheader(self, name, default=None):
++        warnings.warn(
++            "HTTPResponse.getheader() is deprecated and will be removed "
++            "in urllib3 v2.1.0. Instead use HTTPResponse.headers.get(name, default).",
++            category=DeprecationWarning,
++            stacklevel=2,
++        )
+         return self.headers.get(name, default)
+ 
+     # Backwards compatibility for http.cookiejar
+     def info(self):
+         return self.headers
+ 
+     # Overrides from io.IOBase
+     def close(self):
+diff --git a/third_party/python/urllib3/urllib3/util/connection.py b/third_party/python/urllib3/urllib3/util/connection.py
+--- a/third_party/python/urllib3/urllib3/util/connection.py
++++ b/third_party/python/urllib3/urllib3/util/connection.py
+@@ -1,15 +1,14 @@
+ from __future__ import absolute_import
+ 
+ import socket
+ 
+-from urllib3.exceptions import LocationParseError
+-
+ from ..contrib import _appengine_environ
++from ..exceptions import LocationParseError
+ from ..packages import six
+ from .wait import NoWayToWaitForSocketError, wait_for_read
+ 
+ 
+ def is_connection_dropped(conn):  # Platform-specific
+     """
+     Returns True if the connection is dropped and should be closed.
+ 
+@@ -113,17 +112,17 @@ def allowed_gai_family():
+ 
+     family = socket.AF_INET
+     if HAS_IPV6:
+         family = socket.AF_UNSPEC
+     return family
+ 
+ 
+ def _has_ipv6(host):
+-    """ Returns True if the system can bind an IPv6 address. """
++    """Returns True if the system can bind an IPv6 address."""
+     sock = None
+     has_ipv6 = False
+ 
+     # App Engine doesn't support IPV6 sockets and actually has a quota on the
+     # number of sockets that can be used, so just early out here instead of
+     # creating a socket needlessly.
+     # See https://github.com/urllib3/urllib3/issues/1446
+     if _appengine_environ.is_appengine_sandbox():
+diff --git a/third_party/python/urllib3/urllib3/util/proxy.py b/third_party/python/urllib3/urllib3/util/proxy.py
+--- a/third_party/python/urllib3/urllib3/util/proxy.py
++++ b/third_party/python/urllib3/urllib3/util/proxy.py
+@@ -40,16 +40,17 @@ def create_proxy_ssl_context(
+     """
+     Generates a default proxy ssl context if one hasn't been provided by the
+     user.
+     """
+     ssl_context = create_urllib3_context(
+         ssl_version=resolve_ssl_version(ssl_version),
+         cert_reqs=resolve_cert_reqs(cert_reqs),
+     )
++
+     if (
+         not ca_certs
+         and not ca_cert_dir
+         and not ca_cert_data
+         and hasattr(ssl_context, "load_default_certs")
+     ):
+         ssl_context.load_default_certs()
+ 
+diff --git a/third_party/python/urllib3/urllib3/util/request.py b/third_party/python/urllib3/urllib3/util/request.py
+--- a/third_party/python/urllib3/urllib3/util/request.py
++++ b/third_party/python/urllib3/urllib3/util/request.py
+@@ -9,17 +9,20 @@ from ..packages.six import b, integer_ty
+ # emitting some HTTP headers that are added automatically.
+ # The only headers that are supported are ``Accept-Encoding``,
+ # ``Host``, and ``User-Agent``.
+ SKIP_HEADER = "@@@SKIP_HEADER@@@"
+ SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"])
+ 
+ ACCEPT_ENCODING = "gzip,deflate"
+ try:
+-    import brotli as _unused_module_brotli  # noqa: F401
++    try:
++        import brotlicffi as _unused_module_brotli  # noqa: F401
++    except ImportError:
++        import brotli as _unused_module_brotli  # noqa: F401
+ except ImportError:
+     pass
+ else:
+     ACCEPT_ENCODING += ",br"
+ 
+ _FAILEDTELL = object()
+ 
+ 
+diff --git a/third_party/python/urllib3/urllib3/util/retry.py b/third_party/python/urllib3/urllib3/util/retry.py
+--- a/third_party/python/urllib3/urllib3/util/retry.py
++++ b/third_party/python/urllib3/urllib3/util/retry.py
+@@ -32,17 +32,17 @@ RequestHistory = namedtuple(
+ _Default = object()
+ 
+ 
+ class _RetryMeta(type):
+     @property
+     def DEFAULT_METHOD_WHITELIST(cls):
+         warnings.warn(
+             "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
+-            "will be removed in v2.0. Use 'Retry.DEFAULT_METHODS_ALLOWED' instead",
++            "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead",
+             DeprecationWarning,
+         )
+         return cls.DEFAULT_ALLOWED_METHODS
+ 
+     @DEFAULT_METHOD_WHITELIST.setter
+     def DEFAULT_METHOD_WHITELIST(cls, value):
+         warnings.warn(
+             "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and "
+@@ -64,16 +64,34 @@ class _RetryMeta(type):
+     def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls, value):
+         warnings.warn(
+             "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and "
+             "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead",
+             DeprecationWarning,
+         )
+         cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = value
+ 
++    @property
++    def BACKOFF_MAX(cls):
++        warnings.warn(
++            "Using 'Retry.BACKOFF_MAX' is deprecated and "
++            "will be removed in v2.0. Use 'Retry.DEFAULT_BACKOFF_MAX' instead",
++            DeprecationWarning,
++        )
++        return cls.DEFAULT_BACKOFF_MAX
++
++    @BACKOFF_MAX.setter
++    def BACKOFF_MAX(cls, value):
++        warnings.warn(
++            "Using 'Retry.BACKOFF_MAX' is deprecated and "
++            "will be removed in v2.0. Use 'Retry.DEFAULT_BACKOFF_MAX' instead",
++            DeprecationWarning,
++        )
++        cls.DEFAULT_BACKOFF_MAX = value
++
+ 
+ @six.add_metaclass(_RetryMeta)
+ class Retry(object):
+     """Retry configuration.
+ 
+     Each retry attempt will create a new Retry object with updated values, so
+     they can be safely reused.
+ 
+@@ -176,17 +194,17 @@ class Retry(object):
+         A backoff factor to apply between attempts after the second try
+         (most errors are resolved immediately by a second try without a
+         delay). urllib3 will sleep for::
+ 
+             {backoff factor} * (2 ** ({number of total retries} - 1))
+ 
+         seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep
+         for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer
+-        than :attr:`Retry.BACKOFF_MAX`.
++        than :attr:`Retry.DEFAULT_BACKOFF_MAX`.
+ 
+         By default, backoff is disabled (set to 0).
+ 
+     :param bool raise_on_redirect: Whether, if the number of redirects is
+         exhausted, to raise a MaxRetryError, or to return a response with a
+         response code in the 3xx range.
+ 
+     :param bool raise_on_status: Similar meaning to ``raise_on_redirect``:
+@@ -212,20 +230,20 @@ class Retry(object):
+     DEFAULT_ALLOWED_METHODS = frozenset(
+         ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"]
+     )
+ 
+     #: Default status codes to be used for ``status_forcelist``
+     RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503])
+ 
+     #: Default headers to be used for ``remove_headers_on_redirect``
+-    DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"])
++    DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Cookie", "Authorization"])
+ 
+     #: Maximum backoff time.
+-    BACKOFF_MAX = 120
++    DEFAULT_BACKOFF_MAX = 120
+ 
+     def __init__(
+         self,
+         total=10,
+         connect=None,
+         read=None,
+         redirect=None,
+         status=None,
+@@ -248,16 +266,17 @@ class Retry(object):
+                     "Using both 'allowed_methods' and "
+                     "'method_whitelist' together is not allowed. "
+                     "Instead only use 'allowed_methods'"
+                 )
+             warnings.warn(
+                 "Using 'method_whitelist' with Retry is deprecated and "
+                 "will be removed in v2.0. Use 'allowed_methods' instead",
+                 DeprecationWarning,
++                stacklevel=2,
+             )
+             allowed_methods = method_whitelist
+         if allowed_methods is _Default:
+             allowed_methods = self.DEFAULT_ALLOWED_METHODS
+         if remove_headers_on_redirect is _Default:
+             remove_headers_on_redirect = self.DEFAULT_REMOVE_HEADERS_ON_REDIRECT
+ 
+         self.total = total
+@@ -315,17 +334,17 @@ class Retry(object):
+             else:
+                 params["allowed_methods"] = self.allowed_methods
+ 
+         params.update(kw)
+         return type(self)(**params)
+ 
+     @classmethod
+     def from_int(cls, retries, redirect=True, default=None):
+-        """ Backwards-compatibility for the old retries format."""
++        """Backwards-compatibility for the old retries format."""
+         if retries is None:
+             retries = default if default is not None else cls.DEFAULT
+ 
+         if isinstance(retries, Retry):
+             return retries
+ 
+         redirect = bool(redirect) and None
+         new_retries = cls(retries, redirect=redirect)
+@@ -342,17 +361,17 @@ class Retry(object):
+             list(
+                 takewhile(lambda x: x.redirect_location is None, reversed(self.history))
+             )
+         )
+         if consecutive_errors_len <= 1:
+             return 0
+ 
+         backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1))
+-        return min(self.BACKOFF_MAX, backoff_value)
++        return min(self.DEFAULT_BACKOFF_MAX, backoff_value)
+ 
+     def parse_retry_after(self, retry_after):
+         # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4
+         if re.match(r"^\s*[0-9]+\s*$", retry_after):
+             seconds = int(retry_after)
+         else:
+             retry_date_tuple = email.utils.parsedate_tz(retry_after)
+             if retry_date_tuple is None:
+@@ -368,19 +387,19 @@ class Retry(object):
+             seconds = retry_date - time.time()
+ 
+         if seconds < 0:
+             seconds = 0
+ 
+         return seconds
+ 
+     def get_retry_after(self, response):
+-        """ Get the value of Retry-After in seconds. """
++        """Get the value of Retry-After in seconds."""
+ 
+-        retry_after = response.getheader("Retry-After")
++        retry_after = response.headers.get("Retry-After")
+ 
+         if retry_after is None:
+             return None
+ 
+         return self.parse_retry_after(retry_after)
+ 
+     def sleep_for_retry(self, response=None):
+         retry_after = self.get_retry_after(response)
+@@ -462,17 +481,17 @@ class Retry(object):
+         return (
+             self.total
+             and self.respect_retry_after_header
+             and has_retry_after
+             and (status_code in self.RETRY_AFTER_STATUS_CODES)
+         )
+ 
+     def is_exhausted(self):
+-        """ Are we out of retries? """
++        """Are we out of retries?"""
+         retry_counts = (
+             self.total,
+             self.connect,
+             self.read,
+             self.redirect,
+             self.status,
+             self.other,
+         )
+diff --git a/third_party/python/urllib3/urllib3/util/ssl_.py b/third_party/python/urllib3/urllib3/util/ssl_.py
+--- a/third_party/python/urllib3/urllib3/util/ssl_.py
++++ b/third_party/python/urllib3/urllib3/util/ssl_.py
+@@ -39,35 +39,48 @@ def _const_compare_digest_backport(a, b)
+         result |= left ^ right
+     return result == 0
+ 
+ 
+ _const_compare_digest = getattr(hmac, "compare_digest", _const_compare_digest_backport)
+ 
+ try:  # Test for SSL features
+     import ssl
++    from ssl import CERT_REQUIRED, wrap_socket
++except ImportError:
++    pass
++
++try:
+     from ssl import HAS_SNI  # Has SNI?
+-    from ssl import CERT_REQUIRED, wrap_socket
++except ImportError:
++    pass
+ 
++try:
+     from .ssltransport import SSLTransport
+ except ImportError:
+     pass
+ 
++
+ try:  # Platform-specific: Python 3.6
+     from ssl import PROTOCOL_TLS
+ 
+     PROTOCOL_SSLv23 = PROTOCOL_TLS
+ except ImportError:
+     try:
+         from ssl import PROTOCOL_SSLv23 as PROTOCOL_TLS
+ 
+         PROTOCOL_SSLv23 = PROTOCOL_TLS
+     except ImportError:
+         PROTOCOL_SSLv23 = PROTOCOL_TLS = 2
+ 
++try:
++    from ssl import PROTOCOL_TLS_CLIENT
++except ImportError:
++    PROTOCOL_TLS_CLIENT = PROTOCOL_TLS
++
+ 
+ try:
+     from ssl import OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3
+ except ImportError:
+     OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
+     OP_NO_COMPRESSION = 0x20000
+ 
+ 
+@@ -146,17 +159,17 @@ except ImportError:
+             self.ciphers = cipher_suite
+ 
+         def wrap_socket(self, socket, server_hostname=None, server_side=False):
+             warnings.warn(
+                 "A true SSLContext object is not available. This prevents "
+                 "urllib3 from configuring SSL appropriately and may cause "
+                 "certain SSL connections to fail. You can upgrade to a newer "
+                 "version of Python to solve this. For more information, see "
+-                "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
++                "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
+                 "#ssl-warnings",
+                 InsecurePlatformWarning,
+             )
+             kwargs = {
+                 "keyfile": self.keyfile,
+                 "certfile": self.certfile,
+                 "ca_certs": self.ca_certs,
+                 "cert_reqs": self.verify_mode,
+@@ -265,17 +278,21 @@ def create_urllib3_context(
+         Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
+         ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``.
+     :param ciphers:
+         Which cipher suites to allow the server to select.
+     :returns:
+         Constructed SSLContext object with specified options
+     :rtype: SSLContext
+     """
+-    context = SSLContext(ssl_version or PROTOCOL_TLS)
++    # PROTOCOL_TLS is deprecated in Python 3.10
++    if not ssl_version or ssl_version == PROTOCOL_TLS:
++        ssl_version = PROTOCOL_TLS_CLIENT
++
++    context = SSLContext(ssl_version)
+ 
+     context.set_ciphers(ciphers or DEFAULT_CIPHERS)
+ 
+     # Setting the default here, as we may have no ssl module on import
+     cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs
+ 
+     if options is None:
+         options = 0
+@@ -300,23 +317,35 @@ def create_urllib3_context(
+     # versions of Python.  We only enable on Python 3.7.4+ or if certificate
+     # verification is enabled to work around Python issue #37428
+     # See: https://bugs.python.org/issue37428
+     if (cert_reqs == ssl.CERT_REQUIRED or sys.version_info >= (3, 7, 4)) and getattr(
+         context, "post_handshake_auth", None
+     ) is not None:
+         context.post_handshake_auth = True
+ 
+-    context.verify_mode = cert_reqs
+-    if (
+-        getattr(context, "check_hostname", None) is not None
+-    ):  # Platform-specific: Python 3.2
+-        # We do our own verification, including fingerprints and alternative
+-        # hostnames. So disable it here
+-        context.check_hostname = False
++    def disable_check_hostname():
++        if (
++            getattr(context, "check_hostname", None) is not None
++        ):  # Platform-specific: Python 3.2
++            # We do our own verification, including fingerprints and alternative
++            # hostnames. So disable it here
++            context.check_hostname = False
++
++    # The order of the below lines setting verify_mode and check_hostname
++    # matter due to safe-guards SSLContext has to prevent an SSLContext with
++    # check_hostname=True, verify_mode=NONE/OPTIONAL. This is made even more
++    # complex because we don't know whether PROTOCOL_TLS_CLIENT will be used
++    # or not so we don't know the initial state of the freshly created SSLContext.
++    if cert_reqs == ssl.CERT_REQUIRED:
++        context.verify_mode = cert_reqs
++        disable_check_hostname()
++    else:
++        disable_check_hostname()
++        context.verify_mode = cert_reqs
+ 
+     # Enable logging of TLS session keys via defacto standard environment variable
+     # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values.
+     if hasattr(context, "keylog_filename"):
+         sslkeylogfile = os.environ.get("SSLKEYLOGFILE")
+         if sslkeylogfile:
+             context.keylog_filename = sslkeylogfile
+ 
+@@ -388,17 +417,17 @@ def ssl_wrap_socket(
+         if key_password is None:
+             context.load_cert_chain(certfile, keyfile)
+         else:
+             context.load_cert_chain(certfile, keyfile, key_password)
+ 
+     try:
+         if hasattr(context, "set_alpn_protocols"):
+             context.set_alpn_protocols(ALPN_PROTOCOLS)
+-    except NotImplementedError:
++    except NotImplementedError:  # Defensive: in CI, we always have set_alpn_protocols
+         pass
+ 
+     # If we detect server_hostname is an IP address then the SNI
+     # extension should not be used according to RFC3546 Section 3.1
+     use_sni_hostname = server_hostname and not is_ipaddress(server_hostname)
+     # SecureTransport uses server_hostname in certificate verification.
+     send_sni = (use_sni_hostname and HAS_SNI) or (
+         IS_SECURETRANSPORT and server_hostname
+@@ -406,17 +435,17 @@ def ssl_wrap_socket(
+     # Do not warn the user if server_hostname is an invalid SNI hostname.
+     if not HAS_SNI and use_sni_hostname:
+         warnings.warn(
+             "An HTTPS request has been made, but the SNI (Server Name "
+             "Indication) extension to TLS is not available on this platform. "
+             "This may cause the server to present an incorrect TLS "
+             "certificate, which can cause validation failures. You can upgrade to "
+             "a newer version of Python to solve this. For more information, see "
+-            "https://urllib3.readthedocs.io/en/latest/advanced-usage.html"
++            "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
+             "#ssl-warnings",
+             SNIMissingWarning,
+         )
+ 
+     if send_sni:
+         ssl_sock = _ssl_wrap_socket_impl(
+             sock, context, tls_in_tls, server_hostname=server_hostname
+         )
+diff --git a/third_party/python/urllib3/urllib3/packages/ssl_match_hostname/_implementation.py b/third_party/python/urllib3/urllib3/util/ssl_match_hostname.py
+rename from third_party/python/urllib3/urllib3/packages/ssl_match_hostname/_implementation.py
+rename to third_party/python/urllib3/urllib3/util/ssl_match_hostname.py
+--- a/third_party/python/urllib3/urllib3/packages/ssl_match_hostname/_implementation.py
++++ b/third_party/python/urllib3/urllib3/util/ssl_match_hostname.py
+@@ -4,17 +4,17 @@
+ # stdlib.   http://docs.python.org/3/license.html
+ 
+ import re
+ import sys
+ 
+ # ipaddress has been backported to 2.6+ in pypi.  If it is installed on the
+ # system, use it to handle IPAddress ServerAltnames (this was added in
+ # python-3.5) otherwise only do DNS matching.  This allows
+-# backports.ssl_match_hostname to continue to be used in Python 2.7.
++# util.ssl_match_hostname to continue to be used in Python 2.7.
+ try:
+     import ipaddress
+ except ImportError:
+     ipaddress = None
+ 
+ __version__ = "3.5.0.1"
+ 
+ 
+@@ -73,17 +73,18 @@ def _dnsname_match(dn, hostname, max_wil
+         pats.append(re.escape(frag))
+ 
+     pat = re.compile(r"\A" + r"\.".join(pats) + r"\Z", re.IGNORECASE)
+     return pat.match(hostname)
+ 
+ 
+ def _to_unicode(obj):
+     if isinstance(obj, str) and sys.version_info < (3,):
+-        obj = unicode(obj, encoding="ascii", errors="strict")
++        # ignored flake8 # F821 to support python 2.7 function
++        obj = unicode(obj, encoding="ascii", errors="strict")  # noqa: F821
+     return obj
+ 
+ 
+ def _ipaddress_match(ipname, host_ip):
+     """Exact matching of IP addresses.
+ 
+     RFC 6125 explicitly doesn't define an algorithm for this
+     (section 1.7.2 - "Out of Scope").
+@@ -106,29 +107,27 @@ def match_hostname(cert, hostname):
+         raise ValueError(
+             "empty or no certificate, match_hostname needs a "
+             "SSL socket or SSL context with either "
+             "CERT_OPTIONAL or CERT_REQUIRED"
+         )
+     try:
+         # Divergence from upstream: ipaddress can't handle byte str
+         host_ip = ipaddress.ip_address(_to_unicode(hostname))
+-    except ValueError:
+-        # Not an IP address (common case)
+-        host_ip = None
+-    except UnicodeError:
+-        # Divergence from upstream: Have to deal with ipaddress not taking
++    except (UnicodeError, ValueError):
++        # ValueError: Not an IP address (common case)
++        # UnicodeError: Divergence from upstream: Have to deal with ipaddress not taking
+         # byte strings.  addresses should be all ascii, so we consider it not
+         # an ipaddress in this case
+         host_ip = None
+     except AttributeError:
+         # Divergence from upstream: Make ipaddress library optional
+         if ipaddress is None:
+             host_ip = None
+-        else:
++        else:  # Defensive
+             raise
+     dnsnames = []
+     san = cert.get("subjectAltName", ())
+     for key, value in san:
+         if key == "DNS":
+             if host_ip is None and _dnsname_match(value, hostname):
+                 return
+             dnsnames.append(value)
+diff --git a/third_party/python/urllib3/urllib3/util/ssltransport.py b/third_party/python/urllib3/urllib3/util/ssltransport.py
+--- a/third_party/python/urllib3/urllib3/util/ssltransport.py
++++ b/third_party/python/urllib3/urllib3/util/ssltransport.py
+@@ -1,14 +1,14 @@
+ import io
+ import socket
+ import ssl
+ 
+-from urllib3.exceptions import ProxySchemeUnsupported
+-from urllib3.packages import six
++from ..exceptions import ProxySchemeUnsupported
++from ..packages import six
+ 
+ SSL_BLOCKSIZE = 16384
+ 
+ 
+ class SSLTransport:
+     """
+     The SSLTransport wraps an existing socket and establishes an SSL connection.
+ 
+@@ -188,17 +188,17 @@ class SSLTransport:
+             return self._ssl_io_loop(self.sslobj.read, len, buffer)
+         except ssl.SSLError as e:
+             if e.errno == ssl.SSL_ERROR_EOF and self.suppress_ragged_eofs:
+                 return 0  # eof, return 0.
+             else:
+                 raise
+ 
+     def _ssl_io_loop(self, func, *args):
+-        """ Performs an I/O loop between incoming/outgoing and the socket."""
++        """Performs an I/O loop between incoming/outgoing and the socket."""
+         should_loop = True
+         ret = None
+ 
+         while should_loop:
+             errno = None
+             try:
+                 ret = func(*args)
+             except ssl.SSLError as e:
+diff --git a/third_party/python/urllib3/urllib3/util/timeout.py b/third_party/python/urllib3/urllib3/util/timeout.py
+--- a/third_party/python/urllib3/urllib3/util/timeout.py
++++ b/third_party/python/urllib3/urllib3/util/timeout.py
+@@ -1,15 +1,14 @@
+ from __future__ import absolute_import
+ 
+ import time
+ 
+-# The default socket timeout, used by httplib to indicate that no timeout was
+-# specified by the user
+-from socket import _GLOBAL_DEFAULT_TIMEOUT
++# The default socket timeout, used by httplib to indicate that no timeout was; specified by the user
++from socket import _GLOBAL_DEFAULT_TIMEOUT, getdefaulttimeout
+ 
+ from ..exceptions import TimeoutStateError
+ 
+ # A sentinel value to indicate that no timeout was specified by the user in
+ # urllib3
+ _Default = object()
+ 
+ 
+@@ -112,16 +111,20 @@ class Timeout(object):
+             self._read,
+             self.total,
+         )
+ 
+     # __str__ provided for backwards compatibility
+     __str__ = __repr__
+ 
+     @classmethod
++    def resolve_default_timeout(cls, timeout):
++        return getdefaulttimeout() if timeout is cls.DEFAULT_TIMEOUT else timeout
++
++    @classmethod
+     def _validate_timeout(cls, value, name):
+         """Check that a timeout attribute is valid.
+ 
+         :param value: The timeout value to validate
+         :param name: The name of the timeout attribute to validate. This is
+             used to specify in error messages.
+         :return: The validated and casted version of the given value.
+         :raises ValueError: If it is a numeric value less than or equal to
+diff --git a/third_party/python/urllib3/urllib3/util/url.py b/third_party/python/urllib3/urllib3/util/url.py
+--- a/third_party/python/urllib3/urllib3/util/url.py
++++ b/third_party/python/urllib3/urllib3/util/url.py
+@@ -45,35 +45,35 @@ LS32_PAT = "(?:{hex}:{hex}|{ipv4})".form
+     # [ *4( h16 ":" ) h16 ] "::"              ls32
+     "(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s",
+     # [ *5( h16 ":" ) h16 ] "::"              h16
+     "(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s",
+     # [ *6( h16 ":" ) h16 ] "::"
+     "(?:(?:%(hex)s:){0,6}%(hex)s)?::",
+ ]
+ 
+-UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~"
++UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._\-~"
+ IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")"
+ ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+"
+ IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]"
+ REG_NAME_PAT = r"(?:[^\[\]%:/?#]|%[a-fA-F0-9]{2})*"
+ TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$")
+ 
+ IPV4_RE = re.compile("^" + IPV4_PAT + "$")
+ IPV6_RE = re.compile("^" + IPV6_PAT + "$")
+ IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT + "$")
+ BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT[2:-2] + "$")
+ ZONE_ID_RE = re.compile("(" + ZONE_ID_PAT + r")\]$")
+ 
+-SUBAUTHORITY_PAT = (u"^(?:(.*)@)?(%s|%s|%s)(?::([0-9]{0,5}))?$") % (
++_HOST_PORT_PAT = ("^(%s|%s|%s)(?::0*?(|0|[1-9][0-9]{0,4}))?$") % (
+     REG_NAME_PAT,
+     IPV4_PAT,
+     IPV6_ADDRZ_PAT,
+ )
+-SUBAUTHORITY_RE = re.compile(SUBAUTHORITY_PAT, re.UNICODE | re.DOTALL)
++_HOST_PORT_RE = re.compile(_HOST_PORT_PAT, re.UNICODE | re.DOTALL)
+ 
+ UNRESERVED_CHARS = set(
+     "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-~"
+ )
+ SUB_DELIM_CHARS = set("!$&'()*+,;=")
+ USERINFO_CHARS = UNRESERVED_CHARS | SUB_DELIM_CHARS | {":"}
+ PATH_CHARS = USERINFO_CHARS | {"@", "/"}
+ QUERY_CHARS = FRAGMENT_CHARS = PATH_CHARS | {"?"}
+@@ -274,16 +274,19 @@ def _remove_path_dot_segments(path):
+ def _normalize_host(host, scheme):
+     if host:
+         if isinstance(host, six.binary_type):
+             host = six.ensure_str(host)
+ 
+         if scheme in NORMALIZABLE_SCHEMES:
+             is_ipv6 = IPV6_ADDRZ_RE.match(host)
+             if is_ipv6:
++                # IPv6 hosts of the form 'a::b%zone' are encoded in a URL as
++                # such per RFC 6874: 'a::b%25zone'. Unquote the ZoneID
++                # separator as necessary to return a valid RFC 4007 scoped IP.
+                 match = ZONE_ID_RE.search(host)
+                 if match:
+                     start, end = match.span(1)
+                     zone_id = host[start:end]
+ 
+                     if zone_id.startswith("%25") and zone_id != "%25":
+                         zone_id = zone_id[3:]
+                     else:
+@@ -295,17 +298,17 @@ def _normalize_host(host, scheme):
+             elif not IPV4_RE.match(host):
+                 return six.ensure_str(
+                     b".".join([_idna_encode(label) for label in host.split(".")])
+                 )
+     return host
+ 
+ 
+ def _idna_encode(name):
+-    if name and any([ord(x) > 128 for x in name]):
++    if name and any(ord(x) >= 128 for x in name):
+         try:
+             import idna
+         except ImportError:
+             six.raise_from(
+                 LocationParseError("Unable to parse URL without the 'idna' module"),
+                 None,
+             )
+         try:
+@@ -326,17 +329,17 @@ def _encode_target(target):
+         target += "?" + query
+     return target
+ 
+ 
+ def parse_url(url):
+     """
+     Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is
+     performed to parse incomplete urls. Fields not provided will be None.
+-    This parser is RFC 3986 compliant.
++    This parser is RFC 3986 and RFC 6874 compliant.
+ 
+     The parser logic and helper functions are based heavily on
+     work done in the ``rfc3986`` module.
+ 
+     :param str url: URL to parse into a :class:`.Url` namedtuple.
+ 
+     Partly backwards-compatible with :mod:`urlparse`.
+ 
+@@ -360,17 +363,19 @@ def parse_url(url):
+     try:
+         scheme, authority, path, query, fragment = URI_RE.match(url).groups()
+         normalize_uri = scheme is None or scheme.lower() in NORMALIZABLE_SCHEMES
+ 
+         if scheme:
+             scheme = scheme.lower()
+ 
+         if authority:
+-            auth, host, port = SUBAUTHORITY_RE.match(authority).groups()
++            auth, _, host_port = authority.rpartition("@")
++            auth = auth or None
++            host, port = _HOST_PORT_RE.match(host_port).groups()
+             if auth and normalize_uri:
+                 auth = _encode_invalid_chars(auth, USERINFO_CHARS)
+             if port == "":
+                 port = None
+         else:
+             auth, host, port = None, None, None
+ 
+         if port is not None:
+diff --git a/third_party/python/urllib3/urllib3/util/wait.py b/third_party/python/urllib3/urllib3/util/wait.py
+--- a/third_party/python/urllib3/urllib3/util/wait.py
++++ b/third_party/python/urllib3/urllib3/util/wait.py
+@@ -37,17 +37,16 @@ class NoWayToWaitForSocketError(Exceptio
+ # So: on Windows we use select(), and everywhere else we use poll(). We also
+ # fall back to select() in case poll() is somehow broken or missing.
+ 
+ if sys.version_info >= (3, 5):
+     # Modern Python, that retries syscalls by default
+     def _retry_on_intr(fn, timeout):
+         return fn(timeout)
+ 
+-
+ else:
+     # Old and broken Pythons.
+     def _retry_on_intr(fn, timeout):
+         if timeout is None:
+             deadline = float("inf")
+         else:
+             deadline = monotonic() + timeout
+ 

+ 7 - 7
mozilla-release/patches/TOP-NOBUG-PLASTER-Stylo-25314.patch

@@ -1,7 +1,7 @@
 # HG changeset patch
 # HG changeset patch
 # User Frank-Rainer Grahl <frgrahl@gmx.net>
 # User Frank-Rainer Grahl <frgrahl@gmx.net>
 # Date 1658061774 -7200
 # Date 1658061774 -7200
-# Parent  b9c7377539db3b65d2d5df1f3d7edcca602881a8
+# Parent  663dd60eb3dda78bb3218721a11abd6cff1afd3d
 No Bug - Hack around enable-stylo failing in mach configure. r=me a=me
 No Bug - Hack around enable-stylo failing in mach configure. r=me a=me
 
 
 Needs more ports in the build system to work ok out of the box.
 Needs more ports in the build system to work ok out of the box.
@@ -9,7 +9,7 @@ Needs more ports in the build system to work ok out of the box.
 diff --git a/build/moz.configure/bindgen.configure b/build/moz.configure/bindgen.configure
 diff --git a/build/moz.configure/bindgen.configure b/build/moz.configure/bindgen.configure
 --- a/build/moz.configure/bindgen.configure
 --- a/build/moz.configure/bindgen.configure
 +++ b/build/moz.configure/bindgen.configure
 +++ b/build/moz.configure/bindgen.configure
-@@ -46,19 +46,19 @@ def llvm_config_paths(host):
+@@ -110,19 +110,19 @@ def llvm_config_paths(host):
  
  
      return llvm_config_progs
      return llvm_config_progs
  
  
@@ -31,14 +31,14 @@ diff --git a/build/moz.configure/bindgen.configure b/build/moz.configure/bindgen
      lines = check_cmd_output(llvm_config, *options).splitlines()
      lines = check_cmd_output(llvm_config, *options).splitlines()
      return lines[0]
      return lines[0]
  
  
-@@ -102,29 +102,19 @@ def bindgen_config_paths(llvm_config, li
+@@ -166,29 +166,19 @@ def bindgen_config_paths(llvm_config, li
          # At least one of the choices must be found.
          # At least one of the choices must be found.
          for choice in libclang_choices:
          for choice in libclang_choices:
              libclang = os.path.join(path, choice)
              libclang = os.path.join(path, choice)
              if os.path.exists(libclang):
              if os.path.exists(libclang):
-                 return (True, None)
+                 return (libclang, None)
          else:
          else:
-             return (False, list(set(libclang_choices)))
+             return (None, list(set(libclang_choices)))
  
  
 -    # XXX: we want this code to be run for both Gecko and JS, but we don't
 -    # XXX: we want this code to be run for both Gecko and JS, but we don't
 -    # necessarily want to force a bindgen/Rust dependency on JS just yet.
 -    # necessarily want to force a bindgen/Rust dependency on JS just yet.
@@ -61,9 +61,9 @@ diff --git a/build/moz.configure/bindgen.configure b/build/moz.configure/bindgen
              PATH may expose 'clang' as well, potentially altering your compiler,
              PATH may expose 'clang' as well, potentially altering your compiler,
              which may not be what you intended.'''))
              which may not be what you intended.'''))
  
  
-@@ -166,19 +156,16 @@ def bindgen_config_paths(llvm_config, li
- 
+@@ -231,19 +221,16 @@ def bindgen_config_paths(llvm_config, li
          return namespace(
          return namespace(
+             libclang=libclang,
              libclang_path=libclang_path,
              libclang_path=libclang_path,
              clang_path=clang_resolved,
              clang_path=clang_resolved,
          )
          )

+ 5 - 0
mozilla-release/patches/series

@@ -5648,10 +5648,12 @@ NOBUG-removenonascii67a1-25314.patch
 1550532-68a1.patch
 1550532-68a1.patch
 1550868-68a1.patch
 1550868-68a1.patch
 1547196-68a1.patch
 1547196-68a1.patch
+1551618-68a1.patch
 1505471-68a1.patch
 1505471-68a1.patch
 1552336-68a1.patch
 1552336-68a1.patch
 1552430-68a1.patch
 1552430-68a1.patch
 1521996-1-PARTIAL-68a1.patch
 1521996-1-PARTIAL-68a1.patch
+1552476-68a1.patch
 1547698-1-68a1.patch
 1547698-1-68a1.patch
 1547698-2-68a1.patch
 1547698-2-68a1.patch
 1547698-3-68a1.patch
 1547698-3-68a1.patch
@@ -6631,6 +6633,7 @@ NOBUG-removenonascii67a1-25314.patch
 1642505-79a1.patch
 1642505-79a1.patch
 1639406-79a1.patch
 1639406-79a1.patch
 1633937-79a1.patch
 1633937-79a1.patch
+1641806-79a1.patch
 1643229-79a1.patch
 1643229-79a1.patch
 1632080-79a1.patch
 1632080-79a1.patch
 1643211-79a1.patch
 1643211-79a1.patch
@@ -7615,3 +7618,5 @@ TOP-NOBUG-fixnasmcheck-25320.patch
 1902935-seamonkey-credits-25320.patch
 1902935-seamonkey-credits-25320.patch
 1862395-incorrect-version-resistfingerprinting-v2-25320.patch
 1862395-incorrect-version-resistfingerprinting-v2-25320.patch
 1737436-use-mozilla-compat-version-define-25320.patch
 1737436-use-mozilla-compat-version-define-25320.patch
+1715900-99a1.patch
+1857492-120a1.patch