Upgrade vendored Requests and urllib3 #10759

Merged · 2 commits · Jan 21, 2022
1 change: 1 addition & 0 deletions news/requests.vendor.rst
@@ -0,0 +1 @@
Upgrade Requests to 2.27.1
1 change: 1 addition & 0 deletions news/urllib3.vendor.rst
@@ -0,0 +1 @@
Upgrade urllib3 to 1.26.8
2 changes: 1 addition & 1 deletion src/pip/_vendor/requests/__init__.py
@@ -141,7 +141,7 @@ def _check_cryptography(cryptography_version):
from .exceptions import (
RequestException, Timeout, URLRequired,
TooManyRedirects, HTTPError, ConnectionError,
FileModeWarning, ConnectTimeout, ReadTimeout
FileModeWarning, ConnectTimeout, ReadTimeout, JSONDecodeError
)

# Set default logging handler to avoid "No handler found" warnings.
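With `JSONDecodeError` added to the top-level imports, the vendored package now exposes the new exception at its root. A minimal sketch of the effect, assuming it runs from a pip source tree where `pip._vendor` is importable:

```python
from pip._vendor import requests
from pip._vendor.requests import exceptions

# The package root and the exceptions module expose the same class,
# so callers no longer need to reach into requests.exceptions.
assert requests.JSONDecodeError is exceptions.JSONDecodeError
```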
6 changes: 3 additions & 3 deletions src/pip/_vendor/requests/__version__.py
@@ -5,10 +5,10 @@
__title__ = 'requests'
__description__ = 'Python HTTP for Humans.'
__url__ = 'https://requests.readthedocs.io'
__version__ = '2.26.0'
__build__ = 0x022600
__version__ = '2.27.1'
__build__ = 0x022701
__author__ = 'Kenneth Reitz'
__author_email__ = '[email protected]'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2020 Kenneth Reitz'
__copyright__ = 'Copyright 2022 Kenneth Reitz'
__cake__ = u'\u2728 \U0001f370 \u2728'
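A quick sanity check that the vendored copies match the news entries above; a sketch assuming a pip source tree:

```python
from pip._vendor import requests, urllib3

# Versions should agree with news/requests.vendor.rst and news/urllib3.vendor.rst.
assert requests.__version__ == "2.27.1"
assert urllib3.__version__ == "1.26.8"
```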
9 changes: 7 additions & 2 deletions src/pip/_vendor/requests/adapters.py
@@ -19,6 +19,7 @@
from pip._vendor.urllib3.exceptions import ClosedPoolError
from pip._vendor.urllib3.exceptions import ConnectTimeoutError
from pip._vendor.urllib3.exceptions import HTTPError as _HTTPError
from pip._vendor.urllib3.exceptions import InvalidHeader as _InvalidHeader
from pip._vendor.urllib3.exceptions import MaxRetryError
from pip._vendor.urllib3.exceptions import NewConnectionError
from pip._vendor.urllib3.exceptions import ProxyError as _ProxyError
@@ -37,7 +38,7 @@
from .cookies import extract_cookies_to_jar
from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError,
ProxyError, RetryError, InvalidSchema, InvalidProxyURL,
InvalidURL)
InvalidURL, InvalidHeader)
from .auth import _basic_auth_str

try:
@@ -457,9 +458,11 @@ def send(self, request, stream=False, timeout=None, verify=True, cert=None, prox
low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)

try:
skip_host = 'Host' in request.headers
low_conn.putrequest(request.method,
url,
skip_accept_encoding=True)
skip_accept_encoding=True,
skip_host=skip_host)

for header, value in request.headers.items():
low_conn.putheader(header, value)
@@ -527,6 +530,8 @@ def send(self, request, stream=False, timeout=None, verify=True, cert=None, prox
raise SSLError(e, request=request)
elif isinstance(e, ReadTimeoutError):
raise ReadTimeout(e, request=request)
elif isinstance(e, _InvalidHeader):
raise InvalidHeader(e, request=request)
else:
raise

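The `skip_host` change affects chunked uploads that carry an explicit `Host` header: `putrequest` previously emitted its own `Host` line alongside the caller's, and now defers to it. A sketch of the affected path, assuming a reachable test server (httpbin.org here is illustrative, not part of this PR):

```python
from pip._vendor import requests

def chunks():
    # A generator body has no length, so requests uses
    # Transfer-Encoding: chunked and the low-level putrequest() path above.
    yield b"chunked payload"

# With skip_host, the explicit Host header below is sent exactly once.
resp = requests.post(
    "http://httpbin.org/post",       # illustrative endpoint
    data=chunks(),
    headers={"Host": "httpbin.org"},
)
print(resp.status_code)
```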
3 changes: 2 additions & 1 deletion src/pip/_vendor/requests/compat.py
@@ -50,13 +50,13 @@
# Keep OrderedDict for backwards compatibility.
from collections import Callable, Mapping, MutableMapping, OrderedDict


builtin_str = str
bytes = str
str = unicode
basestring = basestring
numeric_types = (int, long, float)
integer_types = (int, long)
JSONDecodeError = ValueError

elif is_py3:
from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
@@ -67,6 +67,7 @@
# Keep OrderedDict for backwards compatibility.
from collections import OrderedDict
from collections.abc import Callable, Mapping, MutableMapping
from json import JSONDecodeError

builtin_str = str
str = str
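The compat shim gives both interpreters a single `JSONDecodeError` name: `json.JSONDecodeError` on Python 3, plain `ValueError` on Python 2. A sketch of what downstream code can rely on:

```python
import sys
from pip._vendor.requests import compat

if sys.version_info[0] >= 3:
    import json
    # On Python 3 the compat name is the stdlib class itself.
    assert compat.JSONDecodeError is json.JSONDecodeError
else:
    # On Python 2 there is no json.JSONDecodeError; ValueError stands in.
    assert compat.JSONDecodeError is ValueError
```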
10 changes: 8 additions & 2 deletions src/pip/_vendor/requests/exceptions.py
@@ -8,6 +8,8 @@
"""
from pip._vendor.urllib3.exceptions import HTTPError as BaseHTTPError

from .compat import JSONDecodeError as CompatJSONDecodeError


class RequestException(IOError):
"""There was an ambiguous exception that occurred while handling your
@@ -29,6 +31,10 @@ class InvalidJSONError(RequestException):
"""A JSON error occurred."""


class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
"""Couldn't decode the text into json"""


class HTTPError(RequestException):
"""An HTTP error occurred."""

@@ -74,11 +80,11 @@ class TooManyRedirects(RequestException):


class MissingSchema(RequestException, ValueError):
"""The URL schema (e.g. http or https) is missing."""
"""The URL scheme (e.g. http or https) is missing."""


class InvalidSchema(RequestException, ValueError):
"""See defaults.py for valid schemas."""
"""The URL scheme provided is either invalid or unsupported."""


class InvalidURL(RequestException, ValueError):
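The dual inheritance is the backward-compatibility trick here: the new exception is a `RequestException` (via `InvalidJSONError`) and also a subclass of the platform's native JSON error, so pre-existing `except ValueError` or `except json.JSONDecodeError` handlers keep working. A small sketch:

```python
from pip._vendor.requests.exceptions import (
    InvalidJSONError,
    JSONDecodeError,
    RequestException,
)

# Old-style handlers still catch the new exception:
assert issubclass(JSONDecodeError, ValueError)
# And it slots into the requests hierarchy:
assert issubclass(JSONDecodeError, InvalidJSONError)
assert issubclass(JSONDecodeError, RequestException)
```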
33 changes: 20 additions & 13 deletions src/pip/_vendor/requests/models.py
@@ -29,7 +29,9 @@
from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .exceptions import (
HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError,
ContentDecodingError, ConnectionError, StreamConsumedError, InvalidJSONError)
ContentDecodingError, ConnectionError, StreamConsumedError,
InvalidJSONError)
from .exceptions import JSONDecodeError as RequestsJSONDecodeError
from ._internal_utils import to_native_string, unicode_is_ascii
from .utils import (
guess_filename, get_auth_from_url, requote_uri,
@@ -38,7 +40,7 @@
from .compat import (
Callable, Mapping,
cookielib, urlunparse, urlsplit, urlencode, str, bytes,
is_py2, chardet, builtin_str, basestring)
is_py2, chardet, builtin_str, basestring, JSONDecodeError)
from .compat import json as complexjson
from .status_codes import codes

@@ -384,7 +386,7 @@ def prepare_url(self, url, params):
raise InvalidURL(*e.args)

if not scheme:
error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
error = ("Invalid URL {0!r}: No scheme supplied. Perhaps you meant http://{0}?")
error = error.format(to_native_string(url, 'utf8'))

raise MissingSchema(error)
@@ -401,7 +403,7 @@ def prepare_url(self, url, params):
host = self._get_idna_encoded_host(host)
except UnicodeError:
raise InvalidURL('URL has an invalid label.')
elif host.startswith(u'*'):
elif host.startswith((u'*', u'.')):
raise InvalidURL('URL has an invalid label.')

# Carefully reconstruct the network location
@@ -468,9 +470,9 @@ def prepare_body(self, data, files, json=None):
content_type = 'application/json'

try:
body = complexjson.dumps(json, allow_nan=False)
body = complexjson.dumps(json, allow_nan=False)
except ValueError as ve:
raise InvalidJSONError(ve, request=self)
raise InvalidJSONError(ve, request=self)

if not isinstance(body, bytes):
body = body.encode('utf-8')
@@ -882,12 +884,8 @@ def json(self, **kwargs):
r"""Returns the json-encoded content of a response, if any.

:param \*\*kwargs: Optional arguments that ``json.loads`` takes.
:raises simplejson.JSONDecodeError: If the response body does not
contain valid json and simplejson is installed.
:raises json.JSONDecodeError: If the response body does not contain
valid json and simplejson is not installed on Python 3.
:raises ValueError: If the response body does not contain valid
json and simplejson is not installed on Python 2.
:raises requests.exceptions.JSONDecodeError: If the response body does not
contain valid json.
"""

if not self.encoding and self.content and len(self.content) > 3:
@@ -907,7 +905,16 @@ def json(self, **kwargs):
# and the server didn't bother to tell us what codec *was*
# used.
pass
return complexjson.loads(self.text, **kwargs)

try:
return complexjson.loads(self.text, **kwargs)
except JSONDecodeError as e:
# Catch JSON-related errors and raise as requests.JSONDecodeError
# This aliases json.JSONDecodeError and simplejson.JSONDecodeError
if is_py2: # e is a ValueError
raise RequestsJSONDecodeError(e.message)
else:
raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)

@property
def links(self):
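With the rethrow above, `Response.json()` on a non-JSON body now consistently raises `requests.exceptions.JSONDecodeError` instead of the raw stdlib error. A sketch that fakes a response instead of hitting the network; assigning `_content` directly is an internal shortcut for illustration, not public API:

```python
from pip._vendor.requests.models import Response
from pip._vendor.requests.exceptions import JSONDecodeError

r = Response()
r._content = b"<html>not json</html>"  # internal attribute, test-only shortcut
r.encoding = "utf-8"                   # skip the encoding-guessing fast path

try:
    r.json()
except JSONDecodeError as exc:
    print("requests raised its own JSONDecodeError:", exc)
```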
24 changes: 7 additions & 17 deletions src/pip/_vendor/requests/sessions.py
@@ -29,7 +29,7 @@

from .utils import (
requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies,
get_auth_from_url, rewind_body
get_auth_from_url, rewind_body, resolve_proxies
)

from .status_codes import codes
@@ -269,7 +269,6 @@ def rebuild_auth(self, prepared_request, response):
if new_auth is not None:
prepared_request.prepare_auth(new_auth)


def rebuild_proxies(self, prepared_request, proxies):
"""This method re-evaluates the proxy configuration by considering the
environment variables. If we are redirected to a URL covered by
@@ -282,21 +281,9 @@ def rebuild_proxies(self, prepared_request, proxies):

:rtype: dict
"""
proxies = proxies if proxies is not None else {}
headers = prepared_request.headers
url = prepared_request.url
scheme = urlparse(url).scheme
new_proxies = proxies.copy()
no_proxy = proxies.get('no_proxy')

bypass_proxy = should_bypass_proxies(url, no_proxy=no_proxy)
if self.trust_env and not bypass_proxy:
environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)

proxy = environ_proxies.get(scheme, environ_proxies.get('all'))

if proxy:
new_proxies.setdefault(scheme, proxy)
scheme = urlparse(prepared_request.url).scheme
new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env)

if 'Proxy-Authorization' in headers:
del headers['Proxy-Authorization']
@@ -633,7 +620,10 @@ def send(self, request, **kwargs):
kwargs.setdefault('stream', self.stream)
kwargs.setdefault('verify', self.verify)
kwargs.setdefault('cert', self.cert)
kwargs.setdefault('proxies', self.rebuild_proxies(request, self.proxies))
if 'proxies' not in kwargs:
kwargs['proxies'] = resolve_proxies(
request, self.proxies, self.trust_env
)

# It's possible that users might accidentally send a Request object.
# Guard against that specific failure case.
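Both `rebuild_proxies()` and `Session.send()` now delegate to the shared `resolve_proxies()` helper (added in utils.py below), so a `PreparedRequest` sent directly through `send()` honors `trust_env`, `NO_PROXY`, and environment proxy variables the same way `Session.request()` does. A sketch, assuming network access (example.org is illustrative):

```python
from pip._vendor import requests

session = requests.Session()
prepared = session.prepare_request(requests.Request("GET", "https://example.org/"))

# send() now resolves HTTPS_PROXY / NO_PROXY from the environment itself,
# even though no proxies argument is passed here.
response = session.send(prepared)
print(response.status_code)
```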
63 changes: 55 additions & 8 deletions src/pip/_vendor/requests/utils.py
@@ -21,6 +21,7 @@
import zipfile
from collections import OrderedDict
from pip._vendor.urllib3.util import make_headers
from pip._vendor.urllib3.util import parse_url

from .__version__ import __version__
from . import certs
@@ -124,7 +125,10 @@ def super_len(o):
elif hasattr(o, 'fileno'):
try:
fileno = o.fileno()
except io.UnsupportedOperation:
except (io.UnsupportedOperation, AttributeError):
# AttributeError is a surprising exception, seeing as how we've just checked
# that `hasattr(o, 'fileno')`. It happens for objects obtained via
# `Tarfile.extractfile()`, per issue 5229.
pass
else:
total_length = os.fstat(fileno).st_size
@@ -154,7 +158,7 @@ def super_len(o):
current_position = total_length
else:
if hasattr(o, 'seek') and total_length is None:
# StringIO and BytesIO have seek but no useable fileno
# StringIO and BytesIO have seek but no usable fileno
try:
# seek to end of file
o.seek(0, 2)
@@ -251,6 +255,10 @@ def extract_zipped_paths(path):
archive, member = os.path.split(path)
while archive and not os.path.exists(archive):
archive, prefix = os.path.split(archive)
if not prefix:
# If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split),
# we _can_ end up in an infinite loop on a rare corner case affecting a small number of users
break
member = '/'.join([prefix, member])

if not zipfile.is_zipfile(archive):
@@ -826,6 +834,33 @@ def select_proxy(url, proxies):
return proxy


def resolve_proxies(request, proxies, trust_env=True):
"""This method takes proxy information from a request and configuration
input to resolve a mapping of target proxies. This will consider settings
such as NO_PROXY to strip proxy configurations.

:param request: Request or PreparedRequest
:param proxies: A dictionary of schemes or schemes and hosts to proxy URLs
:param trust_env: Boolean declaring whether to trust environment configs

:rtype: dict
"""
proxies = proxies if proxies is not None else {}
url = request.url
scheme = urlparse(url).scheme
no_proxy = proxies.get('no_proxy')
new_proxies = proxies.copy()

if trust_env and not should_bypass_proxies(url, no_proxy=no_proxy):
environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)

proxy = environ_proxies.get(scheme, environ_proxies.get('all'))

if proxy:
new_proxies.setdefault(scheme, proxy)
return new_proxies


def default_user_agent(name="python-requests"):
"""
Return a string representing the default user agent.
@@ -928,15 +963,27 @@ def prepend_scheme_if_needed(url, new_scheme):

:rtype: str
"""
scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme)

# urlparse is a finicky beast, and sometimes decides that there isn't a
# netloc present. Assume that it's being over-cautious, and switch netloc
# and path if urlparse decided there was no netloc.
parsed = parse_url(url)
scheme, auth, host, port, path, query, fragment = parsed

# A defect in urlparse determines that there isn't a netloc present in some
# urls. We previously assumed parsing was overly cautious, and swapped the
# netloc and path. Due to a lack of tests on the original defect, this is
# maintained with parse_url for backwards compatibility.
netloc = parsed.netloc
if not netloc:
netloc, path = path, netloc

return urlunparse((scheme, netloc, path, params, query, fragment))
if auth:
# parse_url doesn't provide the netloc with auth
# so we'll add it ourselves.
netloc = '@'.join([auth, netloc])
if scheme is None:
scheme = new_scheme
if path is None:
path = ''

return urlunparse((scheme, netloc, path, '', query, fragment))


def get_auth_from_url(url):
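Rewriting `prepend_scheme_if_needed()` on top of urllib3's `parse_url` keeps the userinfo component, which fixes proxy URLs like `user:pass@host` that previously lost their credentials. A sketch of the resulting behavior (host names are illustrative):

```python
from pip._vendor.requests.utils import prepend_scheme_if_needed

print(prepend_scheme_if_needed("example.com", "http"))
# -> http://example.com

print(prepend_scheme_if_needed("user:[email protected]:3128", "http"))
# -> http://user:[email protected]:3128  (auth is no longer dropped)
```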
2 changes: 1 addition & 1 deletion src/pip/_vendor/urllib3/_version.py
@@ -1,2 +1,2 @@
# This file is protected via CODEOWNERS
__version__ = "1.26.7"
__version__ = "1.26.8"
2 changes: 1 addition & 1 deletion src/pip/_vendor/urllib3/connection.py
@@ -51,7 +51,6 @@ class BrokenPipeError(Exception):
SubjectAltNameWarning,
SystemTimeWarning,
)
from .packages.ssl_match_hostname import CertificateError, match_hostname
from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection
from .util.ssl_ import (
assert_fingerprint,
@@ -61,6 +60,7 @@ class BrokenPipeError(Exception):
resolve_ssl_version,
ssl_wrap_socket,
)
from .util.ssl_match_hostname import CertificateError, match_hostname

log = logging.getLogger(__name__)

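This hunk tracks urllib3 1.26.8 relocating its `ssl.match_hostname` backport from `urllib3.packages` to `urllib3.util`. Code that imported the old vendored path would update along these lines (a sketch; only the import location changes):

```python
# New location as of urllib3 1.26.8:
from pip._vendor.urllib3.util.ssl_match_hostname import (
    CertificateError,
    match_hostname,
)
```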