Test on urllib3 1.26.x #6757

Merged: 3 commits, Jul 30, 2024

20 changes: 20 additions & 0 deletions .github/workflows/run-tests.yml
@@ -57,3 +57,23 @@ jobs:
       - name: Run tests
         run: |
           make ci
+
+  urllib3:
+    name: 'urllib3 1.x'
+    runs-on: 'ubuntu-latest'
+    strategy:
+      fail-fast: true
+
+    steps:
+      - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608
+      - name: 'Set up Python 3.8'
+        uses: actions/setup-python@82c7e631bb3cdc910f68e0081d67478d79c6982d
+        with:
+          python-version: '3.8'
+      - name: Install dependencies
+        run: |
+          make
+          python -m pip install "urllib3<2"
+      - name: Run tests
+        run: |
+          make ci
12 changes: 12 additions & 0 deletions src/requests/compat.py
@@ -10,6 +10,18 @@
 import importlib
 import sys
 
+# -------
+# urllib3
+# -------
+from urllib3 import __version__ as urllib3_version
+
+# Detect which major version of urllib3 is being used.
+try:
+    is_urllib3_1 = int(urllib3_version.split(".")[0]) == 1
+except (TypeError, AttributeError):
+    # If we can't discern a version, prefer old functionality.
+    is_urllib3_1 = True
+
 # -------------------
 # Character Detection
 # -------------------
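
As a side note, the behavior of this major-version check can be shown with a small standalone sketch (not part of the diff; the helper name _is_urllib3_1 is invented for the example):

    # Illustration only: mirrors the check added to compat.py above.
    def _is_urllib3_1(version):
        try:
            return int(version.split(".")[0]) == 1
        except (TypeError, AttributeError):
            # If we can't discern a version, prefer old functionality.
            return True

    print(_is_urllib3_1("1.26.18"))  # True  -> urllib3 1.x code paths
    print(_is_urllib3_1("2.2.2"))    # False -> urllib3 2.x code paths
    print(_is_urllib3_1(None))       # True  -> unknown version, fall back to 1.x behavior
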
5 changes: 4 additions & 1 deletion src/requests/utils.py
@@ -38,6 +38,7 @@
     getproxies,
     getproxies_environment,
     integer_types,
+    is_urllib3_1,
 )
 from .compat import parse_http_list as _parse_list_header
 from .compat import (
@@ -136,7 +137,9 @@ def super_len(o):
     total_length = None
     current_position = 0
 
-    if isinstance(o, str):
+    if not is_urllib3_1 and isinstance(o, str):
+        # urllib3 2.x+ treats all strings as utf-8 instead
+        # of latin-1 (iso-8859-1) like http.client.
         o = o.encode("utf-8")
 
     if hasattr(o, "__len__"):
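
The super_len() change matters because, per the comment in the diff, urllib3 2.x sends str bodies as UTF-8, so Content-Length must count bytes rather than characters. A quick standalone illustration (not part of the diff, standard library only):

    # Character count vs. UTF-8 byte count for the string used in the tests below.
    # Measuring len() on the str itself would under-report Content-Length.
    data = "This is a string containing multi-byte UTF-8 ☃️"
    print(len(data))                  # number of characters
    print(len(data.encode("utf-8")))  # number of bytes actually placed on the wire
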
41 changes: 24 additions & 17 deletions tests/test_requests.py
@@ -25,6 +25,7 @@
     builtin_str,
     cookielib,
     getproxies,
+    is_urllib3_1,
     urlparse,
 )
 from requests.cookies import cookiejar_from_dict, morsel_to_cookie
@@ -1810,23 +1811,6 @@ def test_autoset_header_values_are_native(self, httpbin):

         assert p.headers["Content-Length"] == length
 
-    def test_content_length_for_bytes_data(self, httpbin):
-        data = "This is a string containing multi-byte UTF-8 ☃️"
-        encoded_data = data.encode("utf-8")
-        length = str(len(encoded_data))
-        req = requests.Request("POST", httpbin("post"), data=encoded_data)
-        p = req.prepare()
-
-        assert p.headers["Content-Length"] == length
-
-    def test_content_length_for_string_data_counts_bytes(self, httpbin):
-        data = "This is a string containing multi-byte UTF-8 ☃️"
-        length = str(len(data.encode("utf-8")))
-        req = requests.Request("POST", httpbin("post"), data=data)
-        p = req.prepare()
-
-        assert p.headers["Content-Length"] == length
-
     def test_nonhttp_schemes_dont_check_URLs(self):
         test_urls = (
             "data:image/gif;base64,R0lGODlhAQABAHAAACH5BAUAAAAALAAAAAABAAEAAAICRAEAOw==",
@@ -2966,6 +2950,29 @@ def response_handler(sock):
     assert client_cert is not None
 
 
+def test_content_length_for_bytes_data(httpbin):
+    data = "This is a string containing multi-byte UTF-8 ☃️"
+    encoded_data = data.encode("utf-8")
+    length = str(len(encoded_data))
+    req = requests.Request("POST", httpbin("post"), data=encoded_data)
+    p = req.prepare()
+
+    assert p.headers["Content-Length"] == length
+
+
+@pytest.mark.skipif(
+    is_urllib3_1,
+    reason="urllib3 2.x encodes all strings to utf-8, urllib3 1.x uses latin-1",
+)
+def test_content_length_for_string_data_counts_bytes(httpbin):
+    data = "This is a string containing multi-byte UTF-8 ☃️"
+    length = str(len(data.encode("utf-8")))
+    req = requests.Request("POST", httpbin("post"), data=data)
+    p = req.prepare()
+
+    assert p.headers["Content-Length"] == length
+
+
 def test_json_decode_errors_are_serializable_deserializable():
     json_decode_error = requests.exceptions.JSONDecodeError(
         "Extra data",