File manager - Edit - /home/u478019808/domains/bestandroidphones.store/public_html/static/img/logo/test.tar
Back
contrib/duplicate_san.pem 0000644 00000002351 15025234504 0011523 0 ustar 00 -----BEGIN CERTIFICATE----- MIIDcjCCAlqgAwIBAgICAwkwDQYJKoZIhvcNAQEFBQAwVzELMAkGA1UEBhMCVVMx DjAMBgNVBAgMBVRleGFzMQ8wDQYDVQQHDAZBdXN0aW4xDTALBgNVBAoMBFB5Q0Ex GDAWBgNVBAMMD2NyeXB0b2dyYXBoeS5pbzAeFw0wMjAxMDExMjAxMDBaFw0zMDEy MzEwODMwMDBaMFcxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEPMA0GA1UE BwwGQXVzdGluMQ0wCwYDVQQKDARQeUNBMRgwFgYDVQQDDA9jcnlwdG9ncmFwaHku aW8wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDBevx+d0dMqlqoMDYV ij/797UhaFG6IjDl1qv8wcbP71npI+oTMLxZO3OAKrYIpuSjMGUjoxFrpao5ZhRR dOE7bEnpt4Bi5EnXLvsQ/UnpH6CLltBR54Lp9avFtab3mEgnrbjnPaAPIrLv3Nt2 6rRu2tmO1lZidD/cbA4zal0M26p9wp5TY14kyHpbLEIVloBjzetoqXK6u8Hjz/AP uagONypNDCySDR6M7jM85HDcLoFFrbBb8pruHSTxQejMeEmJxYf8b7rNl58/IWPB 1ymbNlvHL/4oSOlnrtHkjcxRWzpQ7U3gT9BThGyhCiI7EMyEHMgP3r7kTzEUwT6I avWDAgMBAAGjSDBGMAwGA1UdEwEB/wQCMAAwGgYDVR0RBBMwEYIPY3J5cHRvZ3Jh cGh5LmlvMBoGA1UdEQQTMBGCD2NyeXB0b2dyYXBoeS5pbzANBgkqhkiG9w0BAQUF AAOCAQEAAzAU5U814RrWYHiRQKDBu0710/ch5Q7z9fuJx7JWcX9pr152vCiPas4D s2YWZS6sNFh6g8h4AaDXjScNar15s2WWmWV3tS5InI9eFxn4TBHSjH4a/Lxa1W0V RA1XWNZZjm7Y9psRKjwiJmnK4Lm0Xk3KWMa03SlOpXNMTCQ/7vQX54zvlnynpo2E +pN6M28rbuA5SXkMRVv77W9kYZITAPqVLI99fSA8xaYW6NpyPtWPFSGbTwmt+U9w h/Acs8RYAR5NaS/aCELTqjsKjQmZAHlmxzbMo0ueqBUyQlOFX+G7TzBW1NFrSJNj 1TbuzMOELwTO7/l+m112lNpSWiP+UQ== -----END CERTIFICATE----- contrib/test_pyopenssl.py 0000644 00000005721 15025234504 0011656 0 ustar 00 from __future__ import annotations import os from unittest import mock import pytest try: from cryptography import x509 from OpenSSL.crypto import ( # type: ignore[import-untyped] FILETYPE_PEM, load_certificate, ) from urllib3.contrib.pyopenssl import _dnsname_to_stdlib, get_subj_alt_name except ImportError: pass def setup_module() -> None: try: from urllib3.contrib.pyopenssl import inject_into_urllib3 inject_into_urllib3() except ImportError as e: pytest.skip(f"Could not import PyOpenSSL: {e!r}") def teardown_module() -> None: try: from urllib3.contrib.pyopenssl import 
extract_from_urllib3 extract_from_urllib3() except ImportError: pass from ..test_ssl import TestSSL # noqa: E402, F401 from ..test_util import TestUtilSSL # noqa: E402, F401 from ..with_dummyserver.test_https import ( # noqa: E402, F401 TestHTTPS_IPV4SAN, TestHTTPS_IPV6SAN, TestHTTPS_TLSv1, TestHTTPS_TLSv1_1, TestHTTPS_TLSv1_2, TestHTTPS_TLSv1_3, ) from ..with_dummyserver.test_socketlevel import ( # noqa: E402, F401 TestClientCerts, TestSNI, TestSocketClosing, ) from ..with_dummyserver.test_socketlevel import ( # noqa: E402, F401 TestSSL as TestSocketSSL, ) class TestPyOpenSSLHelpers: """ Tests for PyOpenSSL helper functions. """ def test_dnsname_to_stdlib_simple(self) -> None: """ We can convert a dnsname to a native string when the domain is simple. """ name = "उदाहरण.परीक" expected_result = "xn--p1b6ci4b4b3a.xn--11b5bs8d" assert _dnsname_to_stdlib(name) == expected_result def test_dnsname_to_stdlib_leading_period(self) -> None: """ If there is a . in front of the domain name we correctly encode it. """ name = ".उदाहरण.परीक" expected_result = ".xn--p1b6ci4b4b3a.xn--11b5bs8d" assert _dnsname_to_stdlib(name) == expected_result def test_dnsname_to_stdlib_leading_splat(self) -> None: """ If there's a wildcard character in the front of the string we handle it appropriately. """ name = "*.उदाहरण.परीक" expected_result = "*.xn--p1b6ci4b4b3a.xn--11b5bs8d" assert _dnsname_to_stdlib(name) == expected_result @mock.patch("urllib3.contrib.pyopenssl.log.warning") def test_get_subj_alt_name(self, mock_warning: mock.MagicMock) -> None: """ If a certificate has two subject alternative names, cryptography raises an x509.DuplicateExtension exception. 
""" path = os.path.join(os.path.dirname(__file__), "duplicate_san.pem") with open(path) as fp: cert = load_certificate(FILETYPE_PEM, fp.read()) assert get_subj_alt_name(cert) == [] assert mock_warning.call_count == 1 assert isinstance(mock_warning.call_args[0][1], x509.DuplicateExtension) contrib/test_socks.py 0000644 00000062504 15025234504 0010746 0 ustar 00 from __future__ import annotations import socket import threading import typing from socket import getaddrinfo as real_getaddrinfo from socket import timeout as SocketTimeout from test import SHORT_TIMEOUT from unittest.mock import Mock, patch import pytest import socks as py_socks # type: ignore[import-not-found] from dummyserver.socketserver import DEFAULT_CA, DEFAULT_CERTS from dummyserver.testcase import IPV4SocketDummyServerTestCase from urllib3.contrib import socks from urllib3.exceptions import ConnectTimeoutError, NewConnectionError try: import ssl from urllib3.util import ssl_ as better_ssl HAS_SSL = True except ImportError: ssl = None # type: ignore[assignment] better_ssl = None # type: ignore[assignment] HAS_SSL = False SOCKS_NEGOTIATION_NONE = b"\x00" SOCKS_NEGOTIATION_PASSWORD = b"\x02" SOCKS_VERSION_SOCKS4 = b"\x04" SOCKS_VERSION_SOCKS5 = b"\x05" def _get_free_port(host: str) -> int: """ Gets a free port by opening a socket, binding it, checking the assigned port, and then closing it. """ s = socket.socket() s.bind((host, 0)) port = s.getsockname()[1] s.close() return port # type: ignore[no-any-return] def _read_exactly(sock: socket.socket, amt: int) -> bytes: """ Read *exactly* ``amt`` bytes from the socket ``sock``. """ data = b"" while amt > 0: chunk = sock.recv(amt) data += chunk amt -= len(chunk) return data def _read_until(sock: socket.socket, char: bytes) -> bytes: """ Read from the socket until the character is received. 
""" chunks = [] while True: chunk = sock.recv(1) chunks.append(chunk) if chunk == char: break return b"".join(chunks) def _address_from_socket(sock: socket.socket) -> bytes | str: """ Returns the address from the SOCKS socket """ addr_type = sock.recv(1) if addr_type == b"\x01": ipv4_addr = _read_exactly(sock, 4) return socket.inet_ntoa(ipv4_addr) elif addr_type == b"\x04": ipv6_addr = _read_exactly(sock, 16) return socket.inet_ntop(socket.AF_INET6, ipv6_addr) elif addr_type == b"\x03": addr_len = ord(sock.recv(1)) return _read_exactly(sock, addr_len) else: raise RuntimeError(f"Unexpected addr type: {addr_type!r}") def _set_up_fake_getaddrinfo(monkeypatch: pytest.MonkeyPatch) -> None: # Work around https://github.com/urllib3/urllib3/pull/2034 # Nothing prevents localhost to point to two different IPs. For example, in the # Ubuntu set up by GitHub Actions, localhost points both to 127.0.0.1 and ::1. # # In case of failure, PySocks will try the same request on both IPs, but our # handle_socks[45]_negotiation functions don't handle retries, which leads either to # a deadlock or a timeout in case of a failure on the first address. # # However, some tests need to exercise failure. We don't want retries there, but # can't affect PySocks retries via its API. Instead, we monkeypatch PySocks so that # it only sees a single address, which effectively disables retries. 
def fake_getaddrinfo( addr: str, port: int, family: int, socket_type: int ) -> list[ tuple[ socket.AddressFamily, socket.SocketKind, int, str, tuple[str, int] | tuple[str, int, int, int], ] ]: gai_list = real_getaddrinfo(addr, port, family, socket_type) gai_list = [gai for gai in gai_list if gai[0] == socket.AF_INET] return gai_list[:1] monkeypatch.setattr(py_socks.socket, "getaddrinfo", fake_getaddrinfo) def handle_socks5_negotiation( sock: socket.socket, negotiate: bool, username: bytes | None = None, password: bytes | None = None, ) -> typing.Generator[tuple[bytes | str, int], bool, None]: """ Handle the SOCKS5 handshake. Returns a generator object that allows us to break the handshake into steps so that the test code can intervene at certain useful points. """ received_version = sock.recv(1) assert received_version == SOCKS_VERSION_SOCKS5 nmethods = ord(sock.recv(1)) methods = _read_exactly(sock, nmethods) if negotiate: assert SOCKS_NEGOTIATION_PASSWORD in methods send_data = SOCKS_VERSION_SOCKS5 + SOCKS_NEGOTIATION_PASSWORD sock.sendall(send_data) # This is the password negotiation. negotiation_version = sock.recv(1) assert negotiation_version == b"\x01" ulen = ord(sock.recv(1)) provided_username = _read_exactly(sock, ulen) plen = ord(sock.recv(1)) provided_password = _read_exactly(sock, plen) if username == provided_username and password == provided_password: sock.sendall(b"\x01\x00") else: sock.sendall(b"\x01\x01") sock.close() return else: assert SOCKS_NEGOTIATION_NONE in methods send_data = SOCKS_VERSION_SOCKS5 + SOCKS_NEGOTIATION_NONE sock.sendall(send_data) # Client sends where they want to go. received_version = sock.recv(1) command = sock.recv(1) reserved = sock.recv(1) addr = _address_from_socket(sock) port_raw = _read_exactly(sock, 2) port = (ord(port_raw[0:1]) << 8) + (ord(port_raw[1:2])) # Check some basic stuff. assert received_version == SOCKS_VERSION_SOCKS5 assert command == b"\x01" # Only support connect, not bind. 
assert reserved == b"\x00" # Yield the address port tuple. succeed = yield addr, port if succeed: # Hard-coded response for now. response = SOCKS_VERSION_SOCKS5 + b"\x00\x00\x01\x7f\x00\x00\x01\xea\x60" else: # Hard-coded response for now. response = SOCKS_VERSION_SOCKS5 + b"\x01\00" sock.sendall(response) def handle_socks4_negotiation( sock: socket.socket, username: bytes | None = None ) -> typing.Generator[tuple[bytes | str, int], bool, None]: """ Handle the SOCKS4 handshake. Returns a generator object that allows us to break the handshake into steps so that the test code can intervene at certain useful points. """ received_version = sock.recv(1) command = sock.recv(1) port_raw = _read_exactly(sock, 2) port = (ord(port_raw[0:1]) << 8) + (ord(port_raw[1:2])) addr_raw = _read_exactly(sock, 4) provided_username = _read_until(sock, b"\x00")[:-1] # Strip trailing null. addr: bytes | str if addr_raw == b"\x00\x00\x00\x01": # Magic string: means DNS name. addr = _read_until(sock, b"\x00")[:-1] # Strip trailing null. else: addr = socket.inet_ntoa(addr_raw) # Check some basic stuff. assert received_version == SOCKS_VERSION_SOCKS4 assert command == b"\x01" # Only support connect, not bind. if username is not None and username != provided_username: sock.sendall(b"\x00\x5d\x00\x00\x00\x00\x00\x00") sock.close() return # Yield the address port tuple. succeed = yield addr, port if succeed: response = b"\x00\x5a\xea\x60\x7f\x00\x00\x01" else: response = b"\x00\x5b\x00\x00\x00\x00\x00\x00" sock.sendall(response) class TestSOCKSProxyManager: def test_invalid_socks_version_is_valueerror(self) -> None: with pytest.raises(ValueError, match="Unable to determine SOCKS version"): socks.SOCKSProxyManager(proxy_url="http://example.org") class TestSocks5Proxy(IPV4SocketDummyServerTestCase): """ Test the SOCKS proxy in SOCKS5 mode. 
""" def test_basic_request(self) -> None: def request_handler(listener: socket.socket) -> None: sock = listener.accept()[0] handler = handle_socks5_negotiation(sock, negotiate=False) addr, port = next(handler) assert addr == "16.17.18.19" assert port == 80 with pytest.raises(StopIteration): handler.send(True) while True: buf = sock.recv(65535) if buf.endswith(b"\r\n\r\n"): break sock.sendall( b"HTTP/1.1 200 OK\r\n" b"Server: SocksTestServer\r\n" b"Content-Length: 0\r\n" b"\r\n" ) sock.close() self._start_server(request_handler) proxy_url = f"socks5://{self.host}:{self.port}" with socks.SOCKSProxyManager(proxy_url) as pm: response = pm.request("GET", "http://16.17.18.19") assert response.status == 200 assert response.data == b"" assert response.headers["Server"] == "SocksTestServer" def test_local_dns(self) -> None: def request_handler(listener: socket.socket) -> None: sock = listener.accept()[0] handler = handle_socks5_negotiation(sock, negotiate=False) addr, port = next(handler) assert addr in ["127.0.0.1", "::1"] assert port == 80 with pytest.raises(StopIteration): handler.send(True) while True: buf = sock.recv(65535) if buf.endswith(b"\r\n\r\n"): break sock.sendall( b"HTTP/1.1 200 OK\r\n" b"Server: SocksTestServer\r\n" b"Content-Length: 0\r\n" b"\r\n" ) sock.close() self._start_server(request_handler) proxy_url = f"socks5://{self.host}:{self.port}" with socks.SOCKSProxyManager(proxy_url) as pm: response = pm.request("GET", "http://localhost") assert response.status == 200 assert response.data == b"" assert response.headers["Server"] == "SocksTestServer" def test_correct_header_line(self) -> None: def request_handler(listener: socket.socket) -> None: sock = listener.accept()[0] handler = handle_socks5_negotiation(sock, negotiate=False) addr, port = next(handler) assert addr == b"example.com" assert port == 80 with pytest.raises(StopIteration): handler.send(True) buf = b"" while True: buf += sock.recv(65535) if buf.endswith(b"\r\n\r\n"): break assert 
buf.startswith(b"GET / HTTP/1.1") assert b"Host: example.com" in buf sock.sendall( b"HTTP/1.1 200 OK\r\n" b"Server: SocksTestServer\r\n" b"Content-Length: 0\r\n" b"\r\n" ) sock.close() self._start_server(request_handler) proxy_url = f"socks5h://{self.host}:{self.port}" with socks.SOCKSProxyManager(proxy_url) as pm: response = pm.request("GET", "http://example.com") assert response.status == 200 def test_connection_timeouts(self) -> None: event = threading.Event() def request_handler(listener: socket.socket) -> None: event.wait() self._start_server(request_handler) proxy_url = f"socks5h://{self.host}:{self.port}" with socks.SOCKSProxyManager(proxy_url) as pm: with pytest.raises(ConnectTimeoutError): pm.request( "GET", "http://example.com", timeout=SHORT_TIMEOUT, retries=False ) event.set() @patch("socks.create_connection") def test_socket_timeout(self, create_connection: Mock) -> None: create_connection.side_effect = SocketTimeout() proxy_url = f"socks5h://{self.host}:{self.port}" with socks.SOCKSProxyManager(proxy_url) as pm: with pytest.raises(ConnectTimeoutError, match="timed out"): pm.request("GET", "http://example.com", retries=False) def test_connection_failure(self) -> None: event = threading.Event() def request_handler(listener: socket.socket) -> None: listener.close() event.set() self._start_server(request_handler) proxy_url = f"socks5h://{self.host}:{self.port}" with socks.SOCKSProxyManager(proxy_url) as pm: event.wait() with pytest.raises(NewConnectionError): pm.request("GET", "http://example.com", retries=False) def test_proxy_rejection(self, monkeypatch: pytest.MonkeyPatch) -> None: _set_up_fake_getaddrinfo(monkeypatch) evt = threading.Event() def request_handler(listener: socket.socket) -> None: sock = listener.accept()[0] handler = handle_socks5_negotiation(sock, negotiate=False) addr, port = next(handler) with pytest.raises(StopIteration): handler.send(False) evt.wait() sock.close() self._start_server(request_handler) proxy_url = 
f"socks5h://{self.host}:{self.port}" with socks.SOCKSProxyManager(proxy_url) as pm: with pytest.raises(NewConnectionError): pm.request("GET", "http://example.com", retries=False) evt.set() def test_socks_with_password(self) -> None: def request_handler(listener: socket.socket) -> None: sock = listener.accept()[0] handler = handle_socks5_negotiation( sock, negotiate=True, username=b"user", password=b"pass" ) addr, port = next(handler) assert addr == "16.17.18.19" assert port == 80 with pytest.raises(StopIteration): handler.send(True) while True: buf = sock.recv(65535) if buf.endswith(b"\r\n\r\n"): break sock.sendall( b"HTTP/1.1 200 OK\r\n" b"Server: SocksTestServer\r\n" b"Content-Length: 0\r\n" b"\r\n" ) sock.close() self._start_server(request_handler) proxy_url = f"socks5://{self.host}:{self.port}" with socks.SOCKSProxyManager(proxy_url, username="user", password="pass") as pm: response = pm.request("GET", "http://16.17.18.19") assert response.status == 200 assert response.data == b"" assert response.headers["Server"] == "SocksTestServer" def test_socks_with_auth_in_url(self) -> None: """ Test when we have auth info in url, i.e. 
socks5://user:pass@host:port and no username/password as params """ def request_handler(listener: socket.socket) -> None: sock = listener.accept()[0] handler = handle_socks5_negotiation( sock, negotiate=True, username=b"user", password=b"pass" ) addr, port = next(handler) assert addr == "16.17.18.19" assert port == 80 with pytest.raises(StopIteration): handler.send(True) while True: buf = sock.recv(65535) if buf.endswith(b"\r\n\r\n"): break sock.sendall( b"HTTP/1.1 200 OK\r\n" b"Server: SocksTestServer\r\n" b"Content-Length: 0\r\n" b"\r\n" ) sock.close() self._start_server(request_handler) proxy_url = f"socks5://user:pass@{self.host}:{self.port}" with socks.SOCKSProxyManager(proxy_url) as pm: response = pm.request("GET", "http://16.17.18.19") assert response.status == 200 assert response.data == b"" assert response.headers["Server"] == "SocksTestServer" def test_socks_with_invalid_password(self, monkeypatch: pytest.MonkeyPatch) -> None: _set_up_fake_getaddrinfo(monkeypatch) def request_handler(listener: socket.socket) -> None: sock = listener.accept()[0] handler = handle_socks5_negotiation( sock, negotiate=True, username=b"user", password=b"pass" ) with pytest.raises(StopIteration): next(handler) self._start_server(request_handler) proxy_url = f"socks5h://{self.host}:{self.port}" with socks.SOCKSProxyManager( proxy_url, username="user", password="badpass" ) as pm: with pytest.raises( NewConnectionError, match="SOCKS5 authentication failed" ): pm.request("GET", "http://example.com", retries=False) def test_source_address_works(self) -> None: expected_port = _get_free_port(self.host) def request_handler(listener: socket.socket) -> None: sock = listener.accept()[0] assert sock.getpeername()[0] == "127.0.0.1" assert sock.getpeername()[1] == expected_port handler = handle_socks5_negotiation(sock, negotiate=False) addr, port = next(handler) assert addr == "16.17.18.19" assert port == 80 with pytest.raises(StopIteration): handler.send(True) while True: buf = 
sock.recv(65535) if buf.endswith(b"\r\n\r\n"): break sock.sendall( b"HTTP/1.1 200 OK\r\n" b"Server: SocksTestServer\r\n" b"Content-Length: 0\r\n" b"\r\n" ) sock.close() self._start_server(request_handler) proxy_url = f"socks5://{self.host}:{self.port}" with socks.SOCKSProxyManager( proxy_url, source_address=("127.0.0.1", expected_port) ) as pm: response = pm.request("GET", "http://16.17.18.19") assert response.status == 200 class TestSOCKS4Proxy(IPV4SocketDummyServerTestCase): """ Test the SOCKS proxy in SOCKS4 mode. Has relatively fewer tests than the SOCKS5 case, mostly because once the negotiation is done the two cases behave identically. """ def test_basic_request(self) -> None: def request_handler(listener: socket.socket) -> None: sock = listener.accept()[0] handler = handle_socks4_negotiation(sock) addr, port = next(handler) assert addr == "16.17.18.19" assert port == 80 with pytest.raises(StopIteration): handler.send(True) while True: buf = sock.recv(65535) if buf.endswith(b"\r\n\r\n"): break sock.sendall( b"HTTP/1.1 200 OK\r\n" b"Server: SocksTestServer\r\n" b"Content-Length: 0\r\n" b"\r\n" ) sock.close() self._start_server(request_handler) proxy_url = f"socks4://{self.host}:{self.port}" with socks.SOCKSProxyManager(proxy_url) as pm: response = pm.request("GET", "http://16.17.18.19") assert response.status == 200 assert response.headers["Server"] == "SocksTestServer" assert response.data == b"" def test_local_dns(self) -> None: def request_handler(listener: socket.socket) -> None: sock = listener.accept()[0] handler = handle_socks4_negotiation(sock) addr, port = next(handler) assert addr == "127.0.0.1" assert port == 80 with pytest.raises(StopIteration): handler.send(True) while True: buf = sock.recv(65535) if buf.endswith(b"\r\n\r\n"): break sock.sendall( b"HTTP/1.1 200 OK\r\n" b"Server: SocksTestServer\r\n" b"Content-Length: 0\r\n" b"\r\n" ) sock.close() self._start_server(request_handler) proxy_url = f"socks4://{self.host}:{self.port}" with 
socks.SOCKSProxyManager(proxy_url) as pm: response = pm.request("GET", "http://localhost") assert response.status == 200 assert response.headers["Server"] == "SocksTestServer" assert response.data == b"" def test_correct_header_line(self) -> None: def request_handler(listener: socket.socket) -> None: sock = listener.accept()[0] handler = handle_socks4_negotiation(sock) addr, port = next(handler) assert addr == b"example.com" assert port == 80 with pytest.raises(StopIteration): handler.send(True) buf = b"" while True: buf += sock.recv(65535) if buf.endswith(b"\r\n\r\n"): break assert buf.startswith(b"GET / HTTP/1.1") assert b"Host: example.com" in buf sock.sendall( b"HTTP/1.1 200 OK\r\n" b"Server: SocksTestServer\r\n" b"Content-Length: 0\r\n" b"\r\n" ) sock.close() self._start_server(request_handler) proxy_url = f"socks4a://{self.host}:{self.port}" with socks.SOCKSProxyManager(proxy_url) as pm: response = pm.request("GET", "http://example.com") assert response.status == 200 def test_proxy_rejection(self, monkeypatch: pytest.MonkeyPatch) -> None: _set_up_fake_getaddrinfo(monkeypatch) evt = threading.Event() def request_handler(listener: socket.socket) -> None: sock = listener.accept()[0] handler = handle_socks4_negotiation(sock) addr, port = next(handler) with pytest.raises(StopIteration): handler.send(False) evt.wait() sock.close() self._start_server(request_handler) proxy_url = f"socks4a://{self.host}:{self.port}" with socks.SOCKSProxyManager(proxy_url) as pm: with pytest.raises(NewConnectionError): pm.request("GET", "http://example.com", retries=False) evt.set() def test_socks4_with_username(self) -> None: def request_handler(listener: socket.socket) -> None: sock = listener.accept()[0] handler = handle_socks4_negotiation(sock, username=b"user") addr, port = next(handler) assert addr == "16.17.18.19" assert port == 80 with pytest.raises(StopIteration): handler.send(True) while True: buf = sock.recv(65535) if buf.endswith(b"\r\n\r\n"): break sock.sendall( 
b"HTTP/1.1 200 OK\r\n" b"Server: SocksTestServer\r\n" b"Content-Length: 0\r\n" b"\r\n" ) sock.close() self._start_server(request_handler) proxy_url = f"socks4://{self.host}:{self.port}" with socks.SOCKSProxyManager(proxy_url, username="user") as pm: response = pm.request("GET", "http://16.17.18.19") assert response.status == 200 assert response.data == b"" assert response.headers["Server"] == "SocksTestServer" def test_socks_with_invalid_username(self) -> None: def request_handler(listener: socket.socket) -> None: sock = listener.accept()[0] handler = handle_socks4_negotiation(sock, username=b"user") next(handler, None) self._start_server(request_handler) proxy_url = f"socks4a://{self.host}:{self.port}" with socks.SOCKSProxyManager(proxy_url, username="baduser") as pm: with pytest.raises(NewConnectionError, match="different user-ids"): pm.request("GET", "http://example.com", retries=False) class TestSOCKSWithTLS(IPV4SocketDummyServerTestCase): """ Test that TLS behaves properly for SOCKS proxies. 
""" @pytest.mark.skipif(not HAS_SSL, reason="No TLS available") def test_basic_request(self) -> None: def request_handler(listener: socket.socket) -> None: sock = listener.accept()[0] handler = handle_socks5_negotiation(sock, negotiate=False) addr, port = next(handler) assert addr == b"localhost" assert port == 443 with pytest.raises(StopIteration): handler.send(True) # Wrap in TLS context = better_ssl.SSLContext(ssl.PROTOCOL_SSLv23) # type: ignore[misc] context.load_cert_chain(DEFAULT_CERTS["certfile"], DEFAULT_CERTS["keyfile"]) tls = context.wrap_socket(sock, server_side=True) buf = b"" while True: buf += tls.recv(65535) if buf.endswith(b"\r\n\r\n"): break assert buf.startswith(b"GET / HTTP/1.1\r\n") tls.sendall( b"HTTP/1.1 200 OK\r\n" b"Server: SocksTestServer\r\n" b"Content-Length: 0\r\n" b"\r\n" ) tls.close() sock.close() self._start_server(request_handler) proxy_url = f"socks5h://{self.host}:{self.port}" with socks.SOCKSProxyManager(proxy_url, ca_certs=DEFAULT_CA) as pm: response = pm.request("GET", "https://localhost") assert response.status == 200 assert response.data == b"" assert response.headers["Server"] == "SocksTestServer" contrib/emscripten/templates/pyodide-console.html 0000644 00000021062 15025234504 0016337 0 ustar 00 <!-- taken from https://github.com/pyodide/pyodide/blob/main/src/templates/console.html --> <!-- Copyright (C) 2019-2022, Pyodide contributors and Mozilla --> <!-- SPDX-FileCopyrightText: 2019-2022, Pyodide contributors and Mozilla --> <!-- SPDX-License-Identifier: MPL-2.0 --> <!doctype html> <html> <head> <meta charset="UTF-8" /> <script src="https://cdn.jsdelivr.net/npm/jquery"></script> <script src="https://cdn.jsdelivr.net/npm/jquery.terminal@2.35.2/js/jquery.terminal.min.js"></script> <script src="https://cdn.jsdelivr.net/npm/jquery.terminal@2.35.2/js/unix_formatting.min.js"></script> <link href="https://cdn.jsdelivr.net/npm/jquery.terminal@2.35.2/css/jquery.terminal.min.css" rel="stylesheet" /> <style> .terminal { --size: 1.5; 
--color: rgba(255, 255, 255, 0.8); } .noblink { --animation: terminal-none; } body { background-color: black; } #jquery-terminal-logo { color: white; border-color: white; position: absolute; top: 7px; right: 18px; z-index: 2; } #jquery-terminal-logo a { color: gray; text-decoration: none; font-size: 0.7em; } #loading { display: inline-block; width: 50px; height: 50px; position: fixed; top: 50%; left: 50%; border: 3px solid rgba(172, 237, 255, 0.5); border-radius: 50%; border-top-color: #fff; animation: spin 1s ease-in-out infinite; -webkit-animation: spin 1s ease-in-out infinite; } @keyframes spin { to { -webkit-transform: rotate(360deg); } } @-webkit-keyframes spin { to { -webkit-transform: rotate(360deg); } } </style> </head> <body> <div id="jquery-terminal-logo"> <a href="https://terminal.jcubic.pl/">jQuery Terminal</a> </div> <div id="loading"></div> <script> "use strict"; function sleep(s) { return new Promise((resolve) => setTimeout(resolve, s)); } async function main() { let indexURL = "https://cdn.jsdelivr.net/pyodide/v0.25.0/full/"; const urlParams = new URLSearchParams(window.location.search); const buildParam = urlParams.get("build"); if (buildParam) { if (["full", "debug", "pyc"].includes(buildParam)) { indexURL = indexURL.replace( "/full/", "/" + urlParams.get("build") + "/", ); } else { console.warn( 'Invalid URL parameter: build="' + buildParam + '". 
Using default "full".', ); } } const { loadPyodide } = await import(indexURL + "pyodide.mjs"); // to facilitate debugging globalThis.loadPyodide = loadPyodide; let term; globalThis.pyodide = await loadPyodide({ stdin: () => { let result = prompt(); echo(result); return result; }, }); let { repr_shorten, BANNER, PyodideConsole } = pyodide.pyimport("pyodide.console"); BANNER = `Welcome to the Pyodide ${pyodide.version} terminal emulator 🐍\n` + BANNER; const pyconsole = PyodideConsole(pyodide.globals); const namespace = pyodide.globals.get("dict")(); const await_fut = pyodide.runPython( ` import builtins from pyodide.ffi import to_js async def await_fut(fut): res = await fut if res is not None: builtins._ = res return to_js([res], depth=1) await_fut `, { globals: namespace }, ); namespace.destroy(); const echo = (msg, ...opts) => term.echo( msg .replaceAll("]]", "]]") .replaceAll("[[", "[["), ...opts, ); const ps1 = ">>> "; const ps2 = "... "; async function lock() { let resolve; const ready = term.ready; term.ready = new Promise((res) => (resolve = res)); await ready; return resolve; } async function interpreter(command) { const unlock = await lock(); term.pause(); // multiline should be split (useful when pasting) for (const c of command.split("\n")) { const escaped = c.replaceAll(/\u00a0/g, " "); const fut = pyconsole.push(escaped); term.set_prompt(fut.syntax_check === "incomplete" ? ps2 : ps1); switch (fut.syntax_check) { case "syntax-error": term.error(fut.formatted_error.trimEnd()); continue; case "incomplete": continue; case "complete": break; default: throw new Error(`Unexpected type ${ty}`); } // In JavaScript, await automatically also awaits any results of // awaits, so if an async function returns a future, it will await // the inner future too. This is not what we want so we // temporarily put it into a list to protect it. const wrapped = await_fut(fut); // complete case, get result / error and print it. 
try { const [value] = await wrapped; if (value !== undefined) { echo( repr_shorten.callKwargs(value, { separator: "\n<long output truncated>\n", }), ); } if (value instanceof pyodide.ffi.PyProxy) { value.destroy(); } } catch (e) { if (e.constructor.name === "PythonError") { const message = fut.formatted_error || e.message; term.error(message.trimEnd()); } else { throw e; } } finally { fut.destroy(); wrapped.destroy(); } } term.resume(); await sleep(10); unlock(); } term = $("body").terminal(interpreter, { greetings: BANNER, prompt: ps1, completionEscape: false, completion: function (command, callback) { callback(pyconsole.complete(command).toJs()[0]); }, keymap: { "CTRL+C": async function (event, original) { pyconsole.buffer.clear(); term.enter(); echo("KeyboardInterrupt"); term.set_command(""); term.set_prompt(ps1); }, TAB: (event, original) => { const command = term.before_cursor(); // Disable completion for whitespaces. if (command.trim() === "") { term.insert("\t"); return false; } return original(event); }, }, }); window.term = term; pyconsole.stdout_callback = (s) => echo(s, { newline: false }); pyconsole.stderr_callback = (s) => { term.error(s.trimEnd()); }; term.ready = Promise.resolve(); pyodide._api.on_fatal = async (e) => { if (e.name === "Exit") { term.error(e); term.error("Pyodide exited and can no longer be used."); } else { term.error( "Pyodide has suffered a fatal error. 
Please report this to the Pyodide maintainers.", ); term.error("The cause of the fatal error was:"); term.error(e); term.error("Look in the browser console for more details."); } await term.ready; term.pause(); await sleep(15); term.pause(); }; const searchParams = new URLSearchParams(window.location.search); if (searchParams.has("noblink")) { $(".cmd-cursor").addClass("noblink"); } await term.ready; await term.exec("import micropip\n"); await term.exec("micropip.list()\n"); await term.exec('await micropip.install("http://localhost:8000/urllib3-2.2.0-py3-none-any.whl")') await term.exec("micropip.list()"); await term.exec("import urllib3"); await term.exec("urllib3.__version__"); } window.console_ready = main(); </script> </body> </html> contrib/emscripten/test_emscripten.py 0000644 00000107322 15025234504 0014144 0 ustar 00 from __future__ import annotations import sys import typing import pytest from urllib3.fields import _TYPE_FIELD_VALUE_TUPLE from ...port_helpers import find_unused_port if sys.version_info < (3, 11): # pyodide only works on 3.11+ pytest.skip(allow_module_level=True) # only run these tests if pytest_pyodide is installed # so we don't break non-emscripten pytest running pytest_pyodide = pytest.importorskip("pytest_pyodide") from pytest_pyodide import run_in_pyodide # type: ignore[import-not-found] # noqa: E402 from pytest_pyodide.decorator import ( # type: ignore[import-not-found] # noqa: E402 copy_files_to_pyodide, ) from .conftest import PyodideServerInfo, ServerRunnerInfo # noqa: E402 # make our ssl certificates work in chrome pytest_pyodide.runner.CHROME_FLAGS.append("ignore-certificate-errors") # copy our wheel file to pyodide and install it def install_urllib3_wheel() -> ( typing.Callable[ [typing.Callable[..., typing.Any]], typing.Callable[..., typing.Any] ] ): return copy_files_to_pyodide( # type: ignore[no-any-return] file_list=[("dist/*.whl", "/tmp")], install_wheels=True ) @install_urllib3_wheel() def test_index( selenium_coverage: 
typing.Any, testserver_http: PyodideServerInfo ) -> None: @run_in_pyodide # type: ignore[misc] def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] from urllib3.connection import HTTPConnection from urllib3.response import BaseHTTPResponse conn = HTTPConnection(host, port) url = f"http://{host}:{port}/" conn.request("GET", url) response = conn.getresponse() # check methods of response assert isinstance(response, BaseHTTPResponse) assert response.url == url response.url = "http://woo" assert response.url == "http://woo" assert response.connection == conn assert response.retries is None data1 = response.data decoded1 = data1.decode("utf-8") data2 = response.data # check that getting data twice works decoded2 = data2.decode("utf-8") assert decoded1 == decoded2 == "Dummy server!" pyodide_test( selenium_coverage, testserver_http.http_host, testserver_http.http_port ) @install_urllib3_wheel() def test_pool_requests( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo ) -> None: @run_in_pyodide # type: ignore[misc] def pyodide_test(selenium_coverage, host: str, port: int, https_port: int) -> None: # type: ignore[no-untyped-def] # first with PoolManager import urllib3 http = urllib3.PoolManager() resp = http.request("GET", f"http://{host}:{port}/") assert resp.data.decode("utf-8") == "Dummy server!" resp2 = http.request("GET", f"http://{host}:{port}/index") assert resp2.data.decode("utf-8") == "Dummy server!" # should all have come from one pool assert len(http.pools) == 1 resp3 = http.request("GET", f"https://{host}:{https_port}/") assert resp2.data.decode("utf-8") == "Dummy server!" 
# one http pool + one https pool assert len(http.pools) == 2 # now with ConnectionPool # because block == True, this will fail if the connection isn't # returned to the pool correctly after the first request pool = urllib3.HTTPConnectionPool(host, port, maxsize=1, block=True) resp3 = pool.urlopen("GET", "/index") assert resp3.data.decode("utf-8") == "Dummy server!" resp4 = pool.urlopen("GET", "/") assert resp4.data.decode("utf-8") == "Dummy server!" # now with manual release of connection # first - connection should be released once all # data is read pool2 = urllib3.HTTPConnectionPool(host, port, maxsize=1, block=True) resp5 = pool2.urlopen("GET", "/index", preload_content=False) assert pool2.pool is not None # at this point, the connection should not be in the pool assert pool2.pool.qsize() == 0 assert resp5.data.decode("utf-8") == "Dummy server!" # now we've read all the data, connection should be back to the pool assert pool2.pool.qsize() == 1 resp6 = pool2.urlopen("GET", "/index", preload_content=False) assert pool2.pool.qsize() == 0 # force it back to the pool resp6.release_conn() assert pool2.pool.qsize() == 1 read_str = resp6.read() # for consistency with urllib3, this still returns the correct data even though # we are in theory not using the connection any more assert read_str.decode("utf-8") == "Dummy server!" pyodide_test( selenium_coverage, testserver_http.http_host, testserver_http.http_port, testserver_http.https_port, ) # wrong protocol / protocol error etc. 
should raise an exception of http.client.HTTPException @install_urllib3_wheel() def test_wrong_protocol( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo ) -> None: @run_in_pyodide # type: ignore[misc] def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] import http.client import pytest from urllib3.connection import HTTPConnection conn = HTTPConnection(host, port) with pytest.raises(http.client.HTTPException): conn.request("GET", f"http://{host}:{port}/") pyodide_test( selenium_coverage, testserver_http.http_host, testserver_http.https_port ) # wrong protocol / protocol error etc. should raise an exception of http.client.HTTPException @install_urllib3_wheel() def test_bad_method( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo ) -> None: @run_in_pyodide(packages=("pytest",)) # type: ignore[misc] def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] import http.client import pytest from urllib3.connection import HTTPConnection conn = HTTPConnection(host, port) with pytest.raises(http.client.HTTPException): conn.request("TRACE", f"http://{host}:{port}/") pyodide_test( selenium_coverage, testserver_http.http_host, testserver_http.https_port ) # no connection - should raise @install_urllib3_wheel() def test_no_response( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo ) -> None: @run_in_pyodide(packages=("pytest",)) # type: ignore[misc] def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] import http.client import pytest from urllib3.connection import HTTPConnection conn = HTTPConnection(host, port) with pytest.raises(http.client.HTTPException): conn.request("GET", f"http://{host}:{port}/") _ = conn.getresponse() pyodide_test(selenium_coverage, testserver_http.http_host, find_unused_port()) @install_urllib3_wheel() def test_404(selenium_coverage: typing.Any, testserver_http: 
PyodideServerInfo) -> None: @run_in_pyodide # type: ignore[misc] def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] from urllib3.connection import HTTPConnection from urllib3.response import BaseHTTPResponse conn = HTTPConnection(host, port) conn.request("GET", f"http://{host}:{port}/status?status=404 NOT FOUND") response = conn.getresponse() assert isinstance(response, BaseHTTPResponse) assert response.status == 404 pyodide_test( selenium_coverage, testserver_http.http_host, testserver_http.http_port ) # setting timeout should show a warning to js console # if we're on the ui thread, because XMLHttpRequest doesn't # support timeout in async mode if globalThis == Window @install_urllib3_wheel() def test_timeout_warning( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo ) -> None: @run_in_pyodide() # type: ignore[misc] def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] import js # type: ignore[import-not-found] import urllib3.contrib.emscripten.fetch from urllib3.connection import HTTPConnection old_log = js.console.warn log_msgs = [] def capture_log(*args): # type: ignore[no-untyped-def] log_msgs.append(str(args)) old_log(*args) js.console.warn = capture_log conn = HTTPConnection(host, port, timeout=1.0) conn.request("GET", f"http://{host}:{port}/") conn.getresponse() js.console.warn = old_log # should have shown timeout warning exactly once by now assert len([x for x in log_msgs if x.find("Warning: Timeout") != -1]) == 1 assert urllib3.contrib.emscripten.fetch._SHOWN_TIMEOUT_WARNING pyodide_test( selenium_coverage, testserver_http.http_host, testserver_http.http_port ) @install_urllib3_wheel() def test_timeout_in_worker_non_streaming( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo, run_from_server: ServerRunnerInfo, ) -> None: worker_code = f""" import pyodide_js as pjs await 
pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False) from urllib3.exceptions import TimeoutError from urllib3.connection import HTTPConnection conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port},timeout=1.0) result=-1 try: conn.request("GET","/slow") _response = conn.getresponse() result=-3 except TimeoutError as e: result=1 # we've got the correct exception except BaseException as e: result=-2 assert result == 1 """ run_from_server.run_webworker(worker_code) @install_urllib3_wheel() def test_timeout_in_worker_streaming( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo, run_from_server: ServerRunnerInfo, ) -> None: worker_code = f""" import pyodide_js as pjs await pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False) import urllib3.contrib.emscripten.fetch await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready() from urllib3.exceptions import TimeoutError from urllib3.connection import HTTPConnection conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port},timeout=1.0) result=-1 try: conn.request("GET","/slow",preload_content=False) _response = conn.getresponse() result=-3 except TimeoutError as e: result=1 # we've got the correct exception except BaseException as e: result=-2 assert result == 1 """ run_from_server.run_webworker(worker_code) @install_urllib3_wheel() def test_index_https( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo ) -> None: @run_in_pyodide # type: ignore[misc] def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] from urllib3.connection import HTTPSConnection from urllib3.response import BaseHTTPResponse conn = HTTPSConnection(host, port) conn.request("GET", f"https://{host}:{port}/") response = conn.getresponse() assert isinstance(response, BaseHTTPResponse) data = response.data assert 
data.decode("utf-8") == "Dummy server!" pyodide_test( selenium_coverage, testserver_http.http_host, testserver_http.https_port ) @install_urllib3_wheel() def test_non_streaming_no_fallback_warning( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo ) -> None: @run_in_pyodide # type: ignore[misc] def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] import js import urllib3.contrib.emscripten.fetch from urllib3.connection import HTTPSConnection from urllib3.response import BaseHTTPResponse log_msgs = [] old_log = js.console.warn def capture_log(*args): # type: ignore[no-untyped-def] log_msgs.append(str(args)) old_log(*args) js.console.warn = capture_log conn = HTTPSConnection(host, port) conn.request("GET", f"https://{host}:{port}/", preload_content=True) response = conn.getresponse() js.console.warn = old_log assert isinstance(response, BaseHTTPResponse) data = response.data assert data.decode("utf-8") == "Dummy server!" # no console warnings because we didn't ask it to stream the response # check no log messages assert ( len([x for x in log_msgs if x.find("Can't stream HTTP requests") != -1]) == 0 ) assert not urllib3.contrib.emscripten.fetch._SHOWN_STREAMING_WARNING pyodide_test( selenium_coverage, testserver_http.http_host, testserver_http.https_port ) @install_urllib3_wheel() def test_streaming_fallback_warning( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo ) -> None: @run_in_pyodide # type: ignore[misc] def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] import js import urllib3.contrib.emscripten.fetch from urllib3.connection import HTTPSConnection from urllib3.response import BaseHTTPResponse # monkeypatch is_cross_origin_isolated so that it warns about that # even if we're serving it so it is fine urllib3.contrib.emscripten.fetch.is_cross_origin_isolated = lambda: False log_msgs = [] old_log = js.console.warn def capture_log(*args): # 
type: ignore[no-untyped-def] log_msgs.append(str(args)) old_log(*args) js.console.warn = capture_log conn = HTTPSConnection(host, port) conn.request("GET", f"https://{host}:{port}/", preload_content=False) response = conn.getresponse() js.console.warn = old_log assert isinstance(response, BaseHTTPResponse) data = response.data assert data.decode("utf-8") == "Dummy server!" # check that it has warned about falling back to non-streaming fetch exactly once assert ( len([x for x in log_msgs if x.find("Can't stream HTTP requests") != -1]) == 1 ) assert urllib3.contrib.emscripten.fetch._SHOWN_STREAMING_WARNING pyodide_test( selenium_coverage, testserver_http.http_host, testserver_http.https_port ) @install_urllib3_wheel() def test_specific_method( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo, run_from_server: ServerRunnerInfo, ) -> None: @run_in_pyodide # type: ignore[misc] def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] from urllib3 import HTTPSConnectionPool with HTTPSConnectionPool(host, port) as pool: path = "/specific_method?method=POST" response = pool.request("POST", path) assert response.status == 200 response = pool.request("PUT", path) assert response.status == 400 pyodide_test( selenium_coverage, testserver_http.http_host, testserver_http.https_port ) @install_urllib3_wheel() def test_streaming_download( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo, run_from_server: ServerRunnerInfo, ) -> None: # test streaming download, which must be in a webworker # as you can't do it on main thread # this should return the 17mb big file, and # should not log any warning about falling back bigfile_url = ( f"http://{testserver_http.http_host}:{testserver_http.http_port}/bigfile" ) worker_code = f""" import pyodide_js as pjs await pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False) import urllib3.contrib.emscripten.fetch await 
urllib3.contrib.emscripten.fetch.wait_for_streaming_ready() from urllib3.response import BaseHTTPResponse from urllib3.connection import HTTPConnection conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port}) conn.request("GET", "{bigfile_url}",preload_content=False) response = conn.getresponse() assert isinstance(response, BaseHTTPResponse) assert urllib3.contrib.emscripten.fetch._SHOWN_STREAMING_WARNING==False data=response.data.decode('utf-8') assert len(data) == 17825792 """ run_from_server.run_webworker(worker_code) @install_urllib3_wheel() def test_streaming_close( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo, run_from_server: ServerRunnerInfo, ) -> None: # test streaming download, which must be in a webworker # as you can't do it on main thread # this should return the 17mb big file, and # should not log any warning about falling back url = f"http://{testserver_http.http_host}:{testserver_http.http_port}/" worker_code = f""" import pyodide_js as pjs await pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False) import urllib3.contrib.emscripten.fetch await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready() from urllib3.response import BaseHTTPResponse from urllib3.connection import HTTPConnection from io import RawIOBase conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port}) conn.request("GET", "{url}",preload_content=False) response = conn.getresponse() # check body is a RawIOBase stream and isn't seekable, writeable body_internal = response._response.body assert(isinstance(body_internal,RawIOBase)) assert(body_internal.writable() is False) assert(body_internal.seekable() is False) assert(body_internal.readable() is True) response.drain_conn() x=response.read() assert(not x) response.close() conn.close() # try and make destructor be covered # by killing everything del response del body_internal del conn """ 
run_from_server.run_webworker(worker_code) @install_urllib3_wheel() def test_streaming_bad_url( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo, run_from_server: ServerRunnerInfo, ) -> None: # this should cause an error # because the protocol is bad bad_url = f"hsffsdfttp://{testserver_http.http_host}:{testserver_http.http_port}/" # this must be in a webworker # as you can't do it on main thread worker_code = f""" import pytest import pyodide_js as pjs await pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False) import http.client import urllib3.contrib.emscripten.fetch await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready() from urllib3.response import BaseHTTPResponse from urllib3.connection import HTTPConnection conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port}) with pytest.raises(http.client.HTTPException): conn.request("GET", "{bad_url}",preload_content=False) """ run_from_server.run_webworker(worker_code) @install_urllib3_wheel() def test_streaming_bad_method( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo, run_from_server: ServerRunnerInfo, ) -> None: # this should cause an error # because the protocol is bad bad_url = f"http://{testserver_http.http_host}:{testserver_http.http_port}/" # this must be in a webworker # as you can't do it on main thread worker_code = f""" import pytest import http.client import pyodide_js as pjs await pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False) import urllib3.contrib.emscripten.fetch await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready() from urllib3.response import BaseHTTPResponse from urllib3.connection import HTTPConnection conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port}) with pytest.raises(http.client.HTTPException): # TRACE method should throw SecurityError in Javascript 
conn.request("TRACE", "{bad_url}",preload_content=False) """ run_from_server.run_webworker(worker_code) @install_urllib3_wheel() def test_streaming_notready_warning( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo, run_from_server: ServerRunnerInfo, ) -> None: # test streaming download but don't wait for # worker to be ready - should fallback to non-streaming # and log a warning file_url = f"http://{testserver_http.http_host}:{testserver_http.http_port}/" worker_code = f""" import pyodide_js as pjs await pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False) import js import urllib3 from urllib3.response import BaseHTTPResponse from urllib3.connection import HTTPConnection log_msgs=[] old_log=js.console.warn def capture_log(*args): log_msgs.append(str(args)) old_log(*args) js.console.warn=capture_log conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port}) conn.request("GET", "{file_url}",preload_content=False) js.console.warn=old_log response = conn.getresponse() assert isinstance(response, BaseHTTPResponse) data=response.data.decode('utf-8') assert len([x for x in log_msgs if x.find("Can't stream HTTP requests")!=-1])==1 assert urllib3.contrib.emscripten.fetch._SHOWN_STREAMING_WARNING==True """ run_from_server.run_webworker(worker_code) @install_urllib3_wheel() def test_post_receive_json( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo ) -> None: @run_in_pyodide # type: ignore[misc] def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] import json from urllib3.connection import HTTPConnection from urllib3.response import BaseHTTPResponse json_data = { "Bears": "like", "to": {"eat": "buns", "with": ["marmalade", "and custard"]}, } conn = HTTPConnection(host, port) conn.request( "POST", f"http://{host}:{port}/echo_json", body=json.dumps(json_data).encode("utf-8"), headers={"Content-type": "application/json"}, 
) response = conn.getresponse() assert isinstance(response, BaseHTTPResponse) data = response.json() assert data == json_data pyodide_test( selenium_coverage, testserver_http.http_host, testserver_http.http_port ) @install_urllib3_wheel() def test_upload( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo ) -> None: @run_in_pyodide # type: ignore[misc] def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] from urllib3 import HTTPConnectionPool data = "I'm in ur multipart form-data, hazing a cheezburgr" fields: dict[str, _TYPE_FIELD_VALUE_TUPLE] = { "upload_param": "filefield", "upload_filename": "lolcat.txt", "filefield": ("lolcat.txt", data), } fields["upload_size"] = str(len(data)) with HTTPConnectionPool(host, port) as pool: r = pool.request("POST", "/upload", fields=fields) assert r.status == 200 pyodide_test( selenium_coverage, testserver_http.http_host, testserver_http.http_port ) @install_urllib3_wheel() def test_streaming_not_ready_in_browser( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo ) -> None: # streaming ready should always be false # if we're in the main browser thread selenium_coverage.run_async( """ import urllib3.contrib.emscripten.fetch result=await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready() assert(result is False) assert(urllib3.contrib.emscripten.fetch.streaming_ready() is None ) """ ) @install_urllib3_wheel() def test_requests_with_micropip( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo ) -> None: # this can't be @run_in_pyodide because of the async code selenium_coverage.run_async( f""" import micropip await micropip.install("requests") import requests import json r = requests.get("http://{testserver_http.http_host}:{testserver_http.http_port}/") assert(r.status_code == 200) assert(r.text == "Dummy server!") json_data={{"woo":"yay"}} # try posting some json with requests r = 
requests.post("http://{testserver_http.http_host}:{testserver_http.http_port}/echo_json",json=json_data) import js assert(r.json() == json_data) """ ) @install_urllib3_wheel() def test_open_close( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo ) -> None: @run_in_pyodide # type: ignore[misc] def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] from http.client import ResponseNotReady import pytest from urllib3.connection import HTTPConnection conn = HTTPConnection(host, port) # initially connection should be closed assert conn.is_closed is True # connection should have no response with pytest.raises(ResponseNotReady): response = conn.getresponse() # now make the response conn.request("GET", f"http://{host}:{port}/") # we never connect to proxy (or if we do, browser handles it) assert conn.has_connected_to_proxy is False # now connection should be open assert conn.is_closed is False # and should have a response response = conn.getresponse() assert response is not None conn.close() # now it is closed assert conn.is_closed is True # closed connection shouldn't have any response with pytest.raises(ResponseNotReady): conn.getresponse() pyodide_test( selenium_coverage, testserver_http.http_host, testserver_http.http_port ) # check that various ways that the worker may be broken # throw exceptions nicely, by deliberately breaking things # this is for coverage @install_urllib3_wheel() def test_break_worker_streaming( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo, run_from_server: ServerRunnerInfo, ) -> None: worker_code = f""" import pyodide_js as pjs await pjs.loadPackage('http://{testserver_http.http_host}:{testserver_http.http_port}/wheel/dist.whl',deps=False) import pytest import urllib3.contrib.emscripten.fetch import js import http.client await urllib3.contrib.emscripten.fetch.wait_for_streaming_ready() from urllib3.exceptions import TimeoutError from urllib3.connection import 
HTTPConnection conn = HTTPConnection("{testserver_http.http_host}", {testserver_http.http_port},timeout=1.0) # make the fetch worker return a bad response by: # 1) Clearing the int buffer # in the receive stream with pytest.raises(http.client.HTTPException): conn.request("GET","/",preload_content=False) response = conn.getresponse() body_internal = response._response.body assert(body_internal.int_buffer!=None) body_internal.int_buffer=None data=response.read() # 2) Monkeypatch postMessage so that it just sets an # exception status old_pm= body_internal.worker.postMessage with pytest.raises(http.client.HTTPException): conn.request("GET","/",preload_content=False) response = conn.getresponse() # make posted messages set an exception body_internal = response._response.body def set_exception(*args): body_internal.worker.postMessage = old_pm body_internal.int_buffer[1]=4 body_internal.byte_buffer[0]=ord("W") body_internal.byte_buffer[1]=ord("O") body_internal.byte_buffer[2]=ord("O") body_internal.byte_buffer[3]=ord("!") body_internal.byte_buffer[4]=0 js.Atomics.store(body_internal.int_buffer, 0, -4) js.Atomics.notify(body_internal.int_buffer,0) body_internal.worker.postMessage = set_exception data=response.read() # monkeypatch so it returns an unknown value for the magic number on initial fetch call with pytest.raises(http.client.HTTPException): # make posted messages set an exception worker=urllib3.contrib.emscripten.fetch._fetcher.js_worker def set_exception(self,*args): array=js.Int32Array.new(args[0].buffer) array[0]=-1234 worker.postMessage=set_exception.__get__(worker,worker.__class__) conn.request("GET","/",preload_content=False) response = conn.getresponse() data=response.read() urllib3.contrib.emscripten.fetch._fetcher.js_worker.postMessage=old_pm # 3) Stopping the worker receiving any messages which should cause a timeout error # in the receive stream with pytest.raises(TimeoutError): conn.request("GET","/",preload_content=False) response = conn.getresponse() 
# make posted messages not be send body_internal = response._response.body def ignore_message(*args): pass old_pm= body_internal.worker.postMessage body_internal.worker.postMessage = ignore_message data=response.read() body_internal.worker.postMessage = old_pm """ run_from_server.run_webworker(worker_code) @install_urllib3_wheel() def test_response_init_length( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo ) -> None: @run_in_pyodide # type: ignore[misc] def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] import pytest import urllib3.exceptions from urllib3.connection import HTTPConnection from urllib3.response import BaseHTTPResponse conn = HTTPConnection(host, port) conn.request("GET", f"http://{host}:{port}/") response = conn.getresponse() assert isinstance(response, BaseHTTPResponse) # head shouldn't have length length = response._init_length("HEAD") assert length == 0 # multiple inconsistent lengths - should raise invalid header with pytest.raises(urllib3.exceptions.InvalidHeader): response.headers["Content-Length"] = "4,5,6" length = response._init_length("GET") # non-numeric length - should return None response.headers["Content-Length"] = "anna" length = response._init_length("GET") assert length is None # numeric length - should return it response.headers["Content-Length"] = "54" length = response._init_length("GET") assert length == 54 # negative length - should return None response.headers["Content-Length"] = "-12" length = response._init_length("GET") assert length is None # none -> None del response.headers["Content-Length"] length = response._init_length("GET") assert length is None pyodide_test( selenium_coverage, testserver_http.http_host, testserver_http.http_port ) @install_urllib3_wheel() def test_response_close_connection( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo ) -> None: @run_in_pyodide # type: ignore[misc] def pyodide_test(selenium_coverage, host: str, 
port: int) -> None: # type: ignore[no-untyped-def] from urllib3.connection import HTTPConnection from urllib3.response import BaseHTTPResponse conn = HTTPConnection(host, port) conn.request("GET", f"http://{host}:{port}/") response = conn.getresponse() assert isinstance(response, BaseHTTPResponse) response.close() assert conn.is_closed pyodide_test( selenium_coverage, testserver_http.http_host, testserver_http.http_port ) @install_urllib3_wheel() def test_read_chunked( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo ) -> None: @run_in_pyodide # type: ignore[misc] def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] from urllib3.connection import HTTPConnection conn = HTTPConnection(host, port) conn.request("GET", f"http://{host}:{port}/mediumfile", preload_content=False) response = conn.getresponse() count = 0 for x in response.read_chunked(512): count += 1 if count < 10: assert len(x) == 512 pyodide_test( selenium_coverage, testserver_http.http_host, testserver_http.http_port ) @install_urllib3_wheel() def test_retries( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo ) -> None: @run_in_pyodide # type: ignore[misc] def pyodide_test(selenium_coverage, host: str, port: int) -> None: # type: ignore[no-untyped-def] import pytest import urllib3 pool = urllib3.HTTPConnectionPool( host, port, maxsize=1, block=True, retries=urllib3.util.Retry(connect=5, read=5, redirect=5), ) # monkeypatch connection class to count calls old_request = urllib3.connection.HTTPConnection.request count = 0 def count_calls(self, *args, **argv): # type: ignore[no-untyped-def] nonlocal count count += 1 return old_request(self, *args, **argv) urllib3.connection.HTTPConnection.request = count_calls # type: ignore[method-assign] with pytest.raises(urllib3.exceptions.MaxRetryError): pool.urlopen("GET", "/") # this should fail, but should have tried 6 times total assert count == 6 pyodide_test(selenium_coverage, 
testserver_http.http_host, find_unused_port()) @install_urllib3_wheel() def test_insecure_requests_warning( selenium_coverage: typing.Any, testserver_http: PyodideServerInfo ) -> None: @run_in_pyodide # type: ignore[misc] def pyodide_test(selenium_coverage, host: str, port: int, https_port: int) -> None: # type: ignore[no-untyped-def] import warnings import urllib3 import urllib3.exceptions http = urllib3.PoolManager() with warnings.catch_warnings(record=True) as w: http.request("GET", f"https://{host}:{https_port}") assert len(w) == 0 pyodide_test( selenium_coverage, testserver_http.http_host, testserver_http.http_port, testserver_http.https_port, ) contrib/emscripten/conftest.py 0000644 00000014677 15025234504 0012573 0 ustar 00 from __future__ import annotations import contextlib import os import random import textwrap import typing from dataclasses import dataclass from pathlib import Path from typing import Any, Generator import hypercorn import pytest from dummyserver.app import pyodide_testing_app from dummyserver.hypercornserver import run_hypercorn_in_thread from dummyserver.socketserver import DEFAULT_CERTS from urllib3.util.url import parse_url _coverage_count = 0 def _get_coverage_filename(prefix: str) -> str: global _coverage_count _coverage_count += 1 rand_part = "".join([random.choice("1234567890") for x in range(20)]) return prefix + rand_part + f".{_coverage_count}" @pytest.fixture(scope="module") def testserver_http( request: pytest.FixtureRequest, ) -> Generator[PyodideServerInfo, None, None]: pyodide_dist_dir = Path(os.getcwd(), request.config.getoption("--dist-dir")) pyodide_testing_app.config["pyodide_dist_dir"] = str(pyodide_dist_dir) http_host = "localhost" with contextlib.ExitStack() as stack: http_server_config = hypercorn.Config() http_server_config.bind = [f"{http_host}:0"] stack.enter_context( run_hypercorn_in_thread(http_server_config, pyodide_testing_app) ) http_port = typing.cast(int, parse_url(http_server_config.bind[0]).port) 
https_server_config = hypercorn.Config() https_server_config.certfile = DEFAULT_CERTS["certfile"] https_server_config.keyfile = DEFAULT_CERTS["keyfile"] https_server_config.verify_mode = DEFAULT_CERTS["cert_reqs"] https_server_config.ca_certs = DEFAULT_CERTS["ca_certs"] https_server_config.alpn_protocols = DEFAULT_CERTS["alpn_protocols"] https_server_config.bind = [f"{http_host}:0"] stack.enter_context( run_hypercorn_in_thread(https_server_config, pyodide_testing_app) ) https_port = typing.cast(int, parse_url(https_server_config.bind[0]).port) yield PyodideServerInfo( http_host=http_host, http_port=http_port, https_port=https_port, ) print("Server teardown") @pytest.fixture() def selenium_coverage(selenium: Any) -> Generator[Any, None, None]: def _install_coverage(self: Any) -> None: self.run_js( """ await pyodide.loadPackage("coverage") await pyodide.runPythonAsync(`import coverage _coverage= coverage.Coverage(source_pkgs=['urllib3']) _coverage.start() ` )""" ) setattr( selenium, "_install_coverage", _install_coverage.__get__(selenium, selenium.__class__), ) selenium._install_coverage() yield selenium # on teardown, save _coverage output coverage_out_binary = bytes( selenium.run_js( """ return await pyodide.runPythonAsync(` _coverage.stop() _coverage.save() _coverage_datafile = open(".coverage","rb") _coverage_outdata = _coverage_datafile.read() # avoid polluting main namespace too much import js as _coverage_js # convert to js Array (as default conversion is TypedArray which does # bad things in firefox) _coverage_js.Array.from_(_coverage_outdata) `) """ ) ) with open(f"{_get_coverage_filename('.coverage.emscripten.')}", "wb") as outfile: outfile.write(coverage_out_binary) class ServerRunnerInfo: def __init__(self, host: str, port: int, selenium: Any) -> None: self.host = host self.port = port self.selenium = selenium def run_webworker(self, code: str) -> Any: if isinstance(code, str) and code.startswith("\n"): # we have a multiline string, fix indentation code = 
textwrap.dedent(code) # add coverage collection to this code code = ( textwrap.dedent( """ import coverage _coverage= coverage.Coverage(source_pkgs=['urllib3']) _coverage.start() """ ) + code ) code += textwrap.dedent( """ _coverage.stop() _coverage.save() _coverage_datafile = open(".coverage","rb") _coverage_outdata = _coverage_datafile.read() # avoid polluting main namespace too much import js as _coverage_js # convert to js Array (as default conversion is TypedArray which does # bad things in firefox) _coverage_js.Array.from_(_coverage_outdata) """ ) coverage_out_binary = bytes( self.selenium.run_js( f""" let worker = new Worker('https://{self.host}:{self.port}/pyodide/webworker_dev.js'); let p = new Promise((res, rej) => {{ worker.onmessageerror = e => rej(e); worker.onerror = e => rej(e); worker.onmessage = e => {{ if (e.data.results) {{ res(e.data.results); }} else {{ rej(e.data.error); }} }}; worker.postMessage({{ python: {repr(code)} }}); }}); return await p; """, pyodide_checks=False, ) ) with open( f"{_get_coverage_filename('.coverage.emscripten.worker.')}", "wb" ) as outfile: outfile.write(coverage_out_binary) # run pyodide on our test server instead of on the default # pytest-pyodide one - this makes it so that # we are at the same origin as web requests to server_host @pytest.fixture() def run_from_server( selenium_coverage: Any, testserver_http: PyodideServerInfo ) -> Generator[ServerRunnerInfo, None, None]: addr = f"https://{testserver_http.http_host}:{testserver_http.https_port}/pyodide/test.html" selenium_coverage.goto(addr) selenium_coverage.javascript_setup() selenium_coverage.load_pyodide() selenium_coverage.initialize_pyodide() selenium_coverage.save_state() selenium_coverage.restore_state() # install the wheel, which is served at /wheel/* selenium_coverage.run_js( """ await pyodide.loadPackage('/wheel/dist.whl') """ ) selenium_coverage._install_coverage() yield ServerRunnerInfo( testserver_http.http_host, testserver_http.https_port, 
selenium_coverage ) @dataclass class PyodideServerInfo: http_port: int https_port: int http_host: str contrib/emscripten/__init__.py 0000644 00000000000 15025234504 0012454 0 ustar 00 contrib/__init__.py 0000644 00000000000 15025234504 0010303 0 ustar 00 contrib/test_pyopenssl_dependencies.py 0000644 00000003704 15025234504 0014363 0 ustar 00 from __future__ import annotations from unittest.mock import Mock, patch import pytest try: from urllib3.contrib.pyopenssl import extract_from_urllib3, inject_into_urllib3 except ImportError: pass def setup_module() -> None: try: from urllib3.contrib.pyopenssl import inject_into_urllib3 inject_into_urllib3() except ImportError as e: pytest.skip(f"Could not import PyOpenSSL: {e!r}") def teardown_module() -> None: try: from urllib3.contrib.pyopenssl import extract_from_urllib3 extract_from_urllib3() except ImportError: pass class TestPyOpenSSLInjection: """ Tests for error handling in pyopenssl's 'inject_into urllib3' """ def test_inject_validate_fail_cryptography(self) -> None: """ Injection should not be supported if cryptography is too old. """ try: with patch("cryptography.x509.extensions.Extensions") as mock: del mock.get_extension_for_class with pytest.raises(ImportError): inject_into_urllib3() finally: # `inject_into_urllib3` is not supposed to succeed. # If it does, this test should fail, but we need to # clean up so that subsequent tests are unaffected. extract_from_urllib3() def test_inject_validate_fail_pyopenssl(self) -> None: """ Injection should not be supported if pyOpenSSL is too old. """ try: return_val = Mock() del return_val._x509 with patch("OpenSSL.crypto.X509", return_value=return_val): with pytest.raises(ImportError): inject_into_urllib3() finally: # `inject_into_urllib3` is not supposed to succeed. # If it does, this test should fail, but we need to # clean up so that subsequent tests are unaffected. 
extract_from_urllib3() test_poolmanager.py 0000644 00000044555 15025234504 0010476 0 ustar 00 from __future__ import annotations import gc import socket from test import resolvesLocalhostFQDN from unittest import mock from unittest.mock import MagicMock, patch import pytest from urllib3 import connection_from_url from urllib3.connectionpool import HTTPSConnectionPool from urllib3.exceptions import LocationValueError from urllib3.poolmanager import ( _DEFAULT_BLOCKSIZE, PoolKey, PoolManager, key_fn_by_scheme, ) from urllib3.util import retry, timeout from urllib3.util.url import Url class TestPoolManager: @resolvesLocalhostFQDN() def test_same_url(self) -> None: # Convince ourselves that normally we don't get the same object conn1 = connection_from_url("http://localhost:8081/foo") conn2 = connection_from_url("http://localhost:8081/bar") assert conn1 != conn2 # Now try again using the PoolManager p = PoolManager(1) conn1 = p.connection_from_url("http://localhost:8081/foo") conn2 = p.connection_from_url("http://localhost:8081/bar") assert conn1 == conn2 # Ensure that FQDNs are handled separately from relative domains p = PoolManager(2) conn1 = p.connection_from_url("http://localhost.:8081/foo") conn2 = p.connection_from_url("http://localhost:8081/bar") assert conn1 != conn2 def test_many_urls(self) -> None: urls = [ "http://localhost:8081/foo", "http://www.google.com/mail", "http://localhost:8081/bar", "https://www.google.com/", "https://www.google.com/mail", "http://yahoo.com", "http://bing.com", "http://yahoo.com/", ] connections = set() p = PoolManager(10) for url in urls: conn = p.connection_from_url(url) connections.add(conn) assert len(connections) == 5 def test_manager_clear(self) -> None: p = PoolManager(5) p.connection_from_url("http://google.com") assert len(p.pools) == 1 p.clear() assert len(p.pools) == 0 @pytest.mark.parametrize("url", ["http://@", None]) def test_nohost(self, url: str | None) -> None: p = PoolManager(5) with 
pytest.raises(LocationValueError): p.connection_from_url(url=url) # type: ignore[arg-type] def test_contextmanager(self) -> None: with PoolManager(1) as p: p.connection_from_url("http://google.com") assert len(p.pools) == 1 assert len(p.pools) == 0 def test_http_pool_key_fields(self) -> None: """Assert the HTTPPoolKey fields are honored when selecting a pool.""" connection_pool_kw = { "timeout": timeout.Timeout(3.14), "retries": retry.Retry(total=6, connect=2), "block": True, "source_address": "127.0.0.1", "blocksize": _DEFAULT_BLOCKSIZE + 1, } p = PoolManager() conn_pools = [ p.connection_from_url("http://example.com/"), p.connection_from_url("http://example.com:8000/"), p.connection_from_url("http://other.example.com/"), ] for key, value in connection_pool_kw.items(): p.connection_pool_kw[key] = value conn_pools.append(p.connection_from_url("http://example.com/")) assert all( x is not y for i, x in enumerate(conn_pools) for j, y in enumerate(conn_pools) if i != j ) assert all(isinstance(key, PoolKey) for key in p.pools.keys()) def test_https_pool_key_fields(self) -> None: """Assert the HTTPSPoolKey fields are honored when selecting a pool.""" connection_pool_kw = { "timeout": timeout.Timeout(3.14), "retries": retry.Retry(total=6, connect=2), "block": True, "source_address": "127.0.0.1", "key_file": "/root/totally_legit.key", "cert_file": "/root/totally_legit.crt", "cert_reqs": "CERT_REQUIRED", "ca_certs": "/root/path_to_pem", "ssl_version": "SSLv23_METHOD", "blocksize": _DEFAULT_BLOCKSIZE + 1, } p = PoolManager() conn_pools = [ p.connection_from_url("https://example.com/"), p.connection_from_url("https://example.com:4333/"), p.connection_from_url("https://other.example.com/"), ] # Asking for a connection pool with the same key should give us an # existing pool. 
dup_pools = [] for key, value in connection_pool_kw.items(): p.connection_pool_kw[key] = value conn_pools.append(p.connection_from_url("https://example.com/")) dup_pools.append(p.connection_from_url("https://example.com/")) assert all( x is not y for i, x in enumerate(conn_pools) for j, y in enumerate(conn_pools) if i != j ) assert all(pool in conn_pools for pool in dup_pools) assert all(isinstance(key, PoolKey) for key in p.pools.keys()) def test_default_pool_key_funcs_copy(self) -> None: """Assert each PoolManager gets a copy of ``pool_keys_by_scheme``.""" p = PoolManager() assert p.key_fn_by_scheme == p.key_fn_by_scheme assert p.key_fn_by_scheme is not key_fn_by_scheme def test_pools_keyed_with_from_host(self) -> None: """Assert pools are still keyed correctly with connection_from_host.""" ssl_kw = { "key_file": "/root/totally_legit.key", "cert_file": "/root/totally_legit.crt", "cert_reqs": "CERT_REQUIRED", "ca_certs": "/root/path_to_pem", "ssl_version": "SSLv23_METHOD", } p = PoolManager(5, **ssl_kw) # type: ignore[arg-type] conns = [p.connection_from_host("example.com", 443, scheme="https")] for k in ssl_kw: p.connection_pool_kw[k] = "newval" conns.append(p.connection_from_host("example.com", 443, scheme="https")) assert all( x is not y for i, x in enumerate(conns) for j, y in enumerate(conns) if i != j ) def test_https_connection_from_url_case_insensitive(self) -> None: """Assert scheme case is ignored when pooling HTTPS connections.""" p = PoolManager() pool = p.connection_from_url("https://example.com/") other_pool = p.connection_from_url("HTTPS://EXAMPLE.COM/") assert 1 == len(p.pools) assert pool is other_pool assert all(isinstance(key, PoolKey) for key in p.pools.keys()) def test_https_connection_from_host_case_insensitive(self) -> None: """Assert scheme case is ignored when getting the https key class.""" p = PoolManager() pool = p.connection_from_host("example.com", scheme="https") other_pool = p.connection_from_host("EXAMPLE.COM", scheme="HTTPS") 
assert 1 == len(p.pools) assert pool is other_pool assert all(isinstance(key, PoolKey) for key in p.pools.keys()) def test_https_connection_from_context_case_insensitive(self) -> None: """Assert scheme case is ignored when getting the https key class.""" p = PoolManager() context = {"scheme": "https", "host": "example.com", "port": "443"} other_context = {"scheme": "HTTPS", "host": "EXAMPLE.COM", "port": "443"} pool = p.connection_from_context(context) other_pool = p.connection_from_context(other_context) assert 1 == len(p.pools) assert pool is other_pool assert all(isinstance(key, PoolKey) for key in p.pools.keys()) def test_http_connection_from_url_case_insensitive(self) -> None: """Assert scheme case is ignored when pooling HTTP connections.""" p = PoolManager() pool = p.connection_from_url("http://example.com/") other_pool = p.connection_from_url("HTTP://EXAMPLE.COM/") assert 1 == len(p.pools) assert pool is other_pool assert all(isinstance(key, PoolKey) for key in p.pools.keys()) def test_http_connection_from_host_case_insensitive(self) -> None: """Assert scheme case is ignored when getting the https key class.""" p = PoolManager() pool = p.connection_from_host("example.com", scheme="http") other_pool = p.connection_from_host("EXAMPLE.COM", scheme="HTTP") assert 1 == len(p.pools) assert pool is other_pool assert all(isinstance(key, PoolKey) for key in p.pools.keys()) def test_assert_hostname_and_fingerprint_flag(self) -> None: """Assert that pool manager can accept hostname and fingerprint flags.""" fingerprint = "92:81:FE:85:F7:0C:26:60:EC:D6:B3:BF:93:CF:F9:71:CC:07:7D:0A" p = PoolManager(assert_hostname=True, assert_fingerprint=fingerprint) pool = p.connection_from_url("https://example.com/") assert 1 == len(p.pools) assert isinstance(pool, HTTPSConnectionPool) assert pool.assert_hostname assert fingerprint == pool.assert_fingerprint def test_http_connection_from_context_case_insensitive(self) -> None: """Assert scheme case is ignored when getting the https 
key class.""" p = PoolManager() context = {"scheme": "http", "host": "example.com", "port": "8080"} other_context = {"scheme": "HTTP", "host": "EXAMPLE.COM", "port": "8080"} pool = p.connection_from_context(context) other_pool = p.connection_from_context(other_context) assert 1 == len(p.pools) assert pool is other_pool assert all(isinstance(key, PoolKey) for key in p.pools.keys()) @patch("urllib3.poolmanager.PoolManager.connection_from_host") def test_deprecated_no_scheme(self, connection_from_host: mock.MagicMock) -> None: # Don't actually make a network connection, just verify the DeprecationWarning connection_from_host.side_effect = ConnectionError("Not attempting connection") p = PoolManager() with pytest.warns(DeprecationWarning) as records: with pytest.raises(ConnectionError): p.request(method="GET", url="evil.com://good.com") msg = ( "URLs without a scheme (ie 'https://') are deprecated and will raise an error " "in a future version of urllib3. To avoid this DeprecationWarning ensure all URLs " "start with 'https://' or 'http://'. Read more in this issue: " "https://github.com/urllib3/urllib3/issues/2920" ) assert len(records) == 1 assert isinstance(records[0].message, DeprecationWarning) assert records[0].message.args[0] == msg @patch("urllib3.poolmanager.PoolManager.connection_from_pool_key") def test_connection_from_context_strict_param( self, connection_from_pool_key: mock.MagicMock ) -> None: p = PoolManager() context = { "scheme": "http", "host": "example.com", "port": 8080, "strict": True, } with pytest.warns(DeprecationWarning) as records: p.connection_from_context(context) msg = ( "The 'strict' parameter is no longer needed on Python 3+. " "This will raise an error in urllib3 v2.1.0." 
) record = records[0] assert isinstance(record.message, Warning) assert record.message.args[0] == msg _, kwargs = connection_from_pool_key.call_args assert kwargs["request_context"] == { "scheme": "http", "host": "example.com", "port": 8080, } def test_custom_pool_key(self) -> None: """Assert it is possible to define a custom key function.""" p = PoolManager(10) p.key_fn_by_scheme["http"] = lambda x: tuple(x["key"]) # type: ignore[assignment] pool1 = p.connection_from_url( "http://example.com", pool_kwargs={"key": "value"} ) pool2 = p.connection_from_url( "http://example.com", pool_kwargs={"key": "other"} ) pool3 = p.connection_from_url( "http://example.com", pool_kwargs={"key": "value", "x": "y"} ) assert 2 == len(p.pools) assert pool1 is pool3 assert pool1 is not pool2 def test_override_pool_kwargs_url(self) -> None: """Assert overriding pool kwargs works with connection_from_url.""" p = PoolManager() pool_kwargs = {"retries": 100, "block": True} default_pool = p.connection_from_url("http://example.com/") override_pool = p.connection_from_url( "http://example.com/", pool_kwargs=pool_kwargs ) assert retry.Retry.DEFAULT == default_pool.retries assert not default_pool.block assert 100 == override_pool.retries assert override_pool.block def test_override_pool_kwargs_host(self) -> None: """Assert overriding pool kwargs works with connection_from_host""" p = PoolManager() pool_kwargs = {"retries": 100, "block": True} default_pool = p.connection_from_host("example.com", scheme="http") override_pool = p.connection_from_host( "example.com", scheme="http", pool_kwargs=pool_kwargs ) assert retry.Retry.DEFAULT == default_pool.retries assert not default_pool.block assert 100 == override_pool.retries assert override_pool.block def test_pool_kwargs_socket_options(self) -> None: """Assert passing socket options works with connection_from_host""" p = PoolManager(socket_options=[]) override_opts = [ (socket.SOL_SOCKET, socket.SO_REUSEADDR, 1), (socket.IPPROTO_TCP, 
socket.TCP_NODELAY, 1), ] pool_kwargs = {"socket_options": override_opts} default_pool = p.connection_from_host("example.com", scheme="http") override_pool = p.connection_from_host( "example.com", scheme="http", pool_kwargs=pool_kwargs ) assert default_pool.conn_kw["socket_options"] == [] assert override_pool.conn_kw["socket_options"] == override_opts def test_merge_pool_kwargs(self) -> None: """Assert _merge_pool_kwargs works in the happy case""" p = PoolManager(retries=100) merged = p._merge_pool_kwargs({"new_key": "value"}) assert {"retries": 100, "new_key": "value"} == merged def test_merge_pool_kwargs_none(self) -> None: """Assert false-y values to _merge_pool_kwargs result in defaults""" p = PoolManager(retries=100) merged = p._merge_pool_kwargs({}) assert p.connection_pool_kw == merged merged = p._merge_pool_kwargs(None) assert p.connection_pool_kw == merged def test_merge_pool_kwargs_remove_key(self) -> None: """Assert keys can be removed with _merge_pool_kwargs""" p = PoolManager(retries=100) merged = p._merge_pool_kwargs({"retries": None}) assert "retries" not in merged def test_merge_pool_kwargs_invalid_key(self) -> None: """Assert removing invalid keys with _merge_pool_kwargs doesn't break""" p = PoolManager(retries=100) merged = p._merge_pool_kwargs({"invalid_key": None}) assert p.connection_pool_kw == merged def test_pool_manager_no_url_absolute_form(self) -> None: """Valides we won't send a request with absolute form without a proxy""" p = PoolManager() assert p._proxy_requires_url_absolute_form(Url("http://example.com")) is False assert p._proxy_requires_url_absolute_form(Url("https://example.com")) is False @pytest.mark.parametrize( "input_blocksize,expected_blocksize", [ (_DEFAULT_BLOCKSIZE, _DEFAULT_BLOCKSIZE), (None, _DEFAULT_BLOCKSIZE), (8192, 8192), ], ) def test_poolmanager_blocksize( self, input_blocksize: int, expected_blocksize: int ) -> None: """Assert PoolManager sets blocksize properly""" p = PoolManager() pool_blocksize = 
p.connection_from_url( "http://example.com", {"blocksize": input_blocksize} ) assert pool_blocksize.conn_kw["blocksize"] == expected_blocksize assert pool_blocksize._get_conn().blocksize == expected_blocksize @pytest.mark.parametrize( "url", [ "[a::b%zone]", "[a::b%25zone]", "http://[a::b%zone]", "http://[a::b%25zone]", ], ) @patch("urllib3.util.connection.create_connection") def test_e2e_connect_to_ipv6_scoped( self, create_connection: MagicMock, url: str ) -> None: """Checks that IPv6 scoped addresses are properly handled end-to-end. This is not strictly speaking a pool manager unit test - this test lives here in absence of a better code location for e2e/integration tests. """ p = PoolManager() conn_pool = p.connection_from_url(url) conn = conn_pool._get_conn() conn.connect() assert create_connection.call_args[0][0] == ("a::b%zone", 80) @patch("urllib3.connection.ssl_wrap_socket") @patch("urllib3.util.connection.create_connection") def test_e2e_connect_to_ipv6_scoped_tls( self, create_connection: MagicMock, ssl_wrap_socket: MagicMock ) -> None: p = PoolManager() conn_pool = p.connection_from_url( "https://[a::b%zone]", pool_kwargs={"assert_hostname": False} ) conn = conn_pool._get_conn() conn.connect() assert ssl_wrap_socket.call_args[1]["server_hostname"] == "a::b" def test_thread_safty(self) -> None: pool_manager = PoolManager(num_pools=2) # thread 1 gets a pool for host x pool_1 = pool_manager.connection_from_url("http://host_x:80/") # thread 2 gets a pool for host y pool_2 = pool_manager.connection_from_url("http://host_y:80/") # thread 3 gets a pool for host z pool_3 = pool_manager.connection_from_url("http://host_z:80") # None of the pools should be closed, since all of them are referenced. assert pool_1.pool is not None assert pool_2.pool is not None assert pool_3.pool is not None conn_queue = pool_1.pool assert conn_queue.qsize() > 0 # thread 1 stops. del pool_1 gc.collect() # Connection should be closed, because reference to pool_1 is gone. 
assert conn_queue.qsize() == 0 test_response.py 0000644 00000160573 15025234504 0010027 0 ustar 00 from __future__ import annotations import contextlib import http.client as httplib import socket import ssl import typing import zlib from base64 import b64decode from http.client import IncompleteRead as httplib_IncompleteRead from io import BufferedReader, BytesIO, TextIOWrapper from test import onlyBrotli, onlyZstd from unittest import mock import pytest from urllib3 import HTTPHeaderDict from urllib3.exceptions import ( BodyNotHttplibCompatible, DecodeError, IncompleteRead, InvalidChunkLength, InvalidHeader, ProtocolError, ResponseNotChunked, SSLError, ) from urllib3.response import ( # type: ignore[attr-defined] BaseHTTPResponse, BytesQueueBuffer, HTTPResponse, brotli, ) from urllib3.util.response import is_fp_closed from urllib3.util.retry import RequestHistory, Retry class TestBytesQueueBuffer: def test_single_chunk(self) -> None: buffer = BytesQueueBuffer() assert len(buffer) == 0 with pytest.raises(RuntimeError, match="buffer is empty"): assert buffer.get(10) assert buffer.get(0) == b"" buffer.put(b"foo") with pytest.raises(ValueError, match="n should be > 0"): buffer.get(-1) assert buffer.get(1) == b"f" assert buffer.get(2) == b"oo" with pytest.raises(RuntimeError, match="buffer is empty"): assert buffer.get(10) def test_read_too_much(self) -> None: buffer = BytesQueueBuffer() buffer.put(b"foo") assert buffer.get(100) == b"foo" def test_multiple_chunks(self) -> None: buffer = BytesQueueBuffer() buffer.put(b"foo") buffer.put(b"bar") buffer.put(b"baz") assert len(buffer) == 9 assert buffer.get(1) == b"f" assert len(buffer) == 8 assert buffer.get(4) == b"ooba" assert len(buffer) == 4 assert buffer.get(4) == b"rbaz" assert len(buffer) == 0 def test_get_all_empty(self) -> None: q = BytesQueueBuffer() assert q.get_all() == b"" assert len(q) == 0 def test_get_all_single(self) -> None: q = BytesQueueBuffer() q.put(b"a") assert q.get_all() == b"a" assert len(q) == 0 
def test_get_all_many(self) -> None: q = BytesQueueBuffer() q.put(b"a") q.put(b"b") q.put(b"c") assert q.get_all() == b"abc" assert len(q) == 0 @pytest.mark.parametrize( "get_func", (lambda b: b.get(len(b)), lambda b: b.get_all()), ids=("get", "get_all"), ) @pytest.mark.limit_memory("12.5 MB") # assert that we're not doubling memory usage def test_memory_usage( self, get_func: typing.Callable[[BytesQueueBuffer], str] ) -> None: # Allocate 10 1MiB chunks buffer = BytesQueueBuffer() for i in range(10): # This allocates 2MiB, putting the max at around 12MiB. Not sure why. buffer.put(bytes(2**20)) assert len(get_func(buffer)) == 10 * 2**20 @pytest.mark.limit_memory("10.01 MB") def test_get_all_memory_usage_single_chunk(self) -> None: buffer = BytesQueueBuffer() chunk = bytes(10 * 2**20) # 10 MiB buffer.put(chunk) assert buffer.get_all() is chunk # A known random (i.e, not-too-compressible) payload generated with: # "".join(random.choice(string.printable) for i in range(512)) # .encode("zlib").encode("base64") # Randomness in tests == bad, and fixing a seed may not be sufficient. 
ZLIB_PAYLOAD = b64decode( b"""\ eJwFweuaoQAAANDfineQhiKLUiaiCzvuTEmNNlJGiL5QhnGpZ99z8luQfe1AHoMioB+QSWHQu/L+ lzd7W5CipqYmeVTBjdgSATdg4l4Z2zhikbuF+EKn69Q0DTpdmNJz8S33odfJoVEexw/l2SS9nFdi pis7KOwXzfSqarSo9uJYgbDGrs1VNnQpT9f8zAorhYCEZronZQF9DuDFfNK3Hecc+WHLnZLQptwk nufw8S9I43sEwxsT71BiqedHo0QeIrFE01F/4atVFXuJs2yxIOak3bvtXjUKAA6OKnQJ/nNvDGKZ Khe5TF36JbnKVjdcL1EUNpwrWVfQpFYJ/WWm2b74qNeSZeQv5/xBhRdOmKTJFYgO96PwrHBlsnLn a3l0LwJsloWpMbzByU5WLbRE6X5INFqjQOtIwYz5BAlhkn+kVqJvWM5vBlfrwP42ifonM5yF4ciJ auHVks62997mNGOsM7WXNG3P98dBHPo2NhbTvHleL0BI5dus2JY81MUOnK3SGWLH8HeWPa1t5KcW S5moAj5HexY/g/F8TctpxwsvyZp38dXeLDjSQvEQIkF7XR3YXbeZgKk3V34KGCPOAeeuQDIgyVhV nP4HF2uWHA==""" ) @pytest.fixture def sock() -> typing.Generator[socket.socket, None, None]: s = socket.socket() yield s s.close() class TestLegacyResponse: def test_getheaders(self) -> None: headers = {"host": "example.com"} r = HTTPResponse(headers=headers) with pytest.warns( DeprecationWarning, match=r"HTTPResponse.getheaders\(\) is deprecated", ): assert r.getheaders() == HTTPHeaderDict(headers) def test_getheader(self) -> None: headers = {"host": "example.com"} r = HTTPResponse(headers=headers) with pytest.warns( DeprecationWarning, match=r"HTTPResponse.getheader\(\) is deprecated", ): assert r.getheader("host") == "example.com" class TestResponse: def test_cache_content(self) -> None: r = HTTPResponse(b"foo") assert r._body == b"foo" assert r.data == b"foo" assert r._body == b"foo" def test_cache_content_preload_false(self) -> None: fp = BytesIO(b"foo") r = HTTPResponse(fp, preload_content=False) assert not r._body assert r.data == b"foo" assert r._body == b"foo" assert r.data == b"foo" def test_default(self) -> None: r = HTTPResponse() assert r.data is None def test_none(self) -> None: r = HTTPResponse(None) # type: ignore[arg-type] assert r.data is None def test_preload(self) -> None: fp = BytesIO(b"foo") r = HTTPResponse(fp, preload_content=True) assert fp.tell() == len(b"foo") assert r.data == b"foo" def 
test_no_preload(self) -> None: fp = BytesIO(b"foo") r = HTTPResponse(fp, preload_content=False) assert fp.tell() == 0 assert r.data == b"foo" assert fp.tell() == len(b"foo") def test_decode_bad_data(self) -> None: fp = BytesIO(b"\x00" * 10) with pytest.raises(DecodeError): HTTPResponse(fp, headers={"content-encoding": "deflate"}) def test_reference_read(self) -> None: fp = BytesIO(b"foo") r = HTTPResponse(fp, preload_content=False) assert r.read(0) == b"" assert r.read(1) == b"f" assert r.read(2) == b"oo" assert r.read() == b"" assert r.read() == b"" @pytest.mark.parametrize("read_args", ((), (None,), (-1,))) def test_reference_read_until_eof(self, read_args: tuple[typing.Any, ...]) -> None: fp = BytesIO(b"foo") r = HTTPResponse(fp, preload_content=False) assert r.read(*read_args) == b"foo" def test_reference_read1(self) -> None: fp = BytesIO(b"foobar") r = HTTPResponse(fp, preload_content=False) assert r.read1(0) == b"" assert r.read1(1) == b"f" assert r.read1(2) == b"oo" assert r.read1() == b"bar" assert r.read1() == b"" @pytest.mark.parametrize("read1_args", ((), (None,), (-1,))) def test_reference_read1_without_limit( self, read1_args: tuple[typing.Any, ...] 
) -> None: fp = BytesIO(b"foo") r = HTTPResponse(fp, preload_content=False) assert r.read1(*read1_args) == b"foo" def test_reference_read1_nodecode(self) -> None: fp = BytesIO(b"foobar") r = HTTPResponse(fp, preload_content=False, decode_content=False) assert r.read1(0) == b"" assert r.read1(1) == b"f" assert r.read1(2) == b"oo" assert r.read1() == b"bar" assert r.read1() == b"" def test_decoding_read1(self) -> None: data = zlib.compress(b"foobar") fp = BytesIO(data) r = HTTPResponse( fp, headers={"content-encoding": "deflate"}, preload_content=False ) assert r.read1(1) == b"f" assert r.read1(2) == b"oo" assert r.read1() == b"bar" assert r.read1() == b"" def test_decode_deflate(self) -> None: data = zlib.compress(b"foo") fp = BytesIO(data) r = HTTPResponse(fp, headers={"content-encoding": "deflate"}) assert r.data == b"foo" def test_decode_deflate_case_insensitve(self) -> None: data = zlib.compress(b"foo") fp = BytesIO(data) r = HTTPResponse(fp, headers={"content-encoding": "DeFlAtE"}) assert r.data == b"foo" def test_chunked_decoding_deflate(self) -> None: data = zlib.compress(b"foo") fp = BytesIO(data) r = HTTPResponse( fp, headers={"content-encoding": "deflate"}, preload_content=False ) assert r.read(1) == b"f" assert r.read(2) == b"oo" assert r.read() == b"" assert r.read() == b"" def test_chunked_decoding_deflate2(self) -> None: compress = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS) data = compress.compress(b"foo") data += compress.flush() fp = BytesIO(data) r = HTTPResponse( fp, headers={"content-encoding": "deflate"}, preload_content=False ) assert r.read(1) == b"f" assert r.read(2) == b"oo" assert r.read() == b"" assert r.read() == b"" @pytest.mark.parametrize("content_encoding", ["gzip", "x-gzip"]) def test_chunked_decoding_gzip(self, content_encoding: str) -> None: compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS) data = compress.compress(b"foo") data += compress.flush() fp = BytesIO(data) r = HTTPResponse( fp, 
headers={"content-encoding": content_encoding}, preload_content=False ) assert r.read(1) == b"f" assert r.read(2) == b"oo" assert r.read() == b"" assert r.read() == b"" def test_decode_gzip_multi_member(self) -> None: compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS) data = compress.compress(b"foo") data += compress.flush() data = data * 3 fp = BytesIO(data) r = HTTPResponse(fp, headers={"content-encoding": "gzip"}) assert r.data == b"foofoofoo" def test_decode_gzip_error(self) -> None: fp = BytesIO(b"foo") with pytest.raises(DecodeError): HTTPResponse(fp, headers={"content-encoding": "gzip"}) def test_decode_gzip_swallow_garbage(self) -> None: # When data comes from multiple calls to read(), data after # the first zlib error (here triggered by garbage) should be # ignored. compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS) data = compress.compress(b"foo") data += compress.flush() data = data * 3 + b"foo" fp = BytesIO(data) r = HTTPResponse( fp, headers={"content-encoding": "gzip"}, preload_content=False ) ret = b"" for _ in range(100): ret += r.read(1) if r.closed: break assert ret == b"foofoofoo" def test_chunked_decoding_gzip_swallow_garbage(self) -> None: compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS) data = compress.compress(b"foo") data += compress.flush() data = data * 3 + b"foo" fp = BytesIO(data) r = HTTPResponse(fp, headers={"content-encoding": "gzip"}) assert r.data == b"foofoofoo" @onlyBrotli() def test_decode_brotli(self) -> None: data = brotli.compress(b"foo") fp = BytesIO(data) r = HTTPResponse(fp, headers={"content-encoding": "br"}) assert r.data == b"foo" @onlyBrotli() def test_chunked_decoding_brotli(self) -> None: data = brotli.compress(b"foobarbaz") fp = BytesIO(data) r = HTTPResponse(fp, headers={"content-encoding": "br"}, preload_content=False) ret = b"" for _ in range(100): ret += r.read(1) if r.closed: break assert ret == b"foobarbaz" @onlyBrotli() def test_decode_brotli_error(self) -> 
None: fp = BytesIO(b"foo") with pytest.raises(DecodeError): HTTPResponse(fp, headers={"content-encoding": "br"}) @onlyZstd() def test_decode_zstd(self) -> None: import zstandard as zstd data = zstd.compress(b"foo") fp = BytesIO(data) r = HTTPResponse(fp, headers={"content-encoding": "zstd"}) assert r.data == b"foo" @onlyZstd() def test_decode_multiframe_zstd(self) -> None: import zstandard as zstd data = ( # Zstandard frame zstd.compress(b"foo") # skippable frame (must be ignored) + bytes.fromhex( "50 2A 4D 18" # Magic_Number (little-endian) "07 00 00 00" # Frame_Size (little-endian) "00 00 00 00 00 00 00" # User_Data ) # Zstandard frame + zstd.compress(b"bar") ) fp = BytesIO(data) r = HTTPResponse(fp, headers={"content-encoding": "zstd"}) assert r.data == b"foobar" @onlyZstd() def test_chunked_decoding_zstd(self) -> None: import zstandard as zstd data = zstd.compress(b"foobarbaz") fp = BytesIO(data) r = HTTPResponse( fp, headers={"content-encoding": "zstd"}, preload_content=False ) ret = b"" for _ in range(100): ret += r.read(1) if r.closed: break assert ret == b"foobarbaz" decode_param_set = [ b"foo", b"x" * 100, ] @onlyZstd() @pytest.mark.parametrize("data", decode_param_set) def test_decode_zstd_error(self, data: bytes) -> None: fp = BytesIO(data) with pytest.raises(DecodeError): HTTPResponse(fp, headers={"content-encoding": "zstd"}) @onlyZstd() @pytest.mark.parametrize("data", decode_param_set) def test_decode_zstd_incomplete_preload_content(self, data: bytes) -> None: import zstandard as zstd data = zstd.compress(data) fp = BytesIO(data[:-1]) with pytest.raises(DecodeError): HTTPResponse(fp, headers={"content-encoding": "zstd"}) @onlyZstd() @pytest.mark.parametrize("data", decode_param_set) def test_decode_zstd_incomplete_read(self, data: bytes) -> None: import zstandard as zstd data = zstd.compress(data) fp = BytesIO(data[:-1]) # shorten the data to trigger DecodeError # create response object without(!) 
reading/decoding the content r = HTTPResponse( fp, headers={"content-encoding": "zstd"}, preload_content=False ) # read/decode, expecting DecodeError with pytest.raises(DecodeError): r.read(decode_content=True) @onlyZstd() @pytest.mark.parametrize("data", decode_param_set) def test_decode_zstd_incomplete_read1(self, data: bytes) -> None: import zstandard as zstd data = zstd.compress(data) fp = BytesIO(data[:-1]) r = HTTPResponse( fp, headers={"content-encoding": "zstd"}, preload_content=False ) # read/decode via read1(!), expecting DecodeError with pytest.raises(DecodeError): amt_decoded = 0 # loop, as read1() may return just partial data while amt_decoded < len(data): part = r.read1(decode_content=True) amt_decoded += len(part) @onlyZstd() @pytest.mark.parametrize("data", decode_param_set) def test_decode_zstd_read1(self, data: bytes) -> None: import zstandard as zstd encoded_data = zstd.compress(data) fp = BytesIO(encoded_data) r = HTTPResponse( fp, headers={"content-encoding": "zstd"}, preload_content=False ) amt_decoded = 0 decoded_data = b"" # loop, as read1() may return just partial data while amt_decoded < len(data): part = r.read1(decode_content=True) amt_decoded += len(part) decoded_data += part assert decoded_data == data def test_multi_decoding_deflate_deflate(self) -> None: data = zlib.compress(zlib.compress(b"foo")) fp = BytesIO(data) r = HTTPResponse(fp, headers={"content-encoding": "deflate, deflate"}) assert r.data == b"foo" def test_multi_decoding_deflate_gzip(self) -> None: compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS) data = compress.compress(zlib.compress(b"foo")) data += compress.flush() fp = BytesIO(data) r = HTTPResponse(fp, headers={"content-encoding": "deflate, gzip"}) assert r.data == b"foo" def test_multi_decoding_gzip_gzip(self) -> None: compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS) data = compress.compress(b"foo") data += compress.flush() compress = zlib.compressobj(6, zlib.DEFLATED, 16 + 
zlib.MAX_WBITS) data = compress.compress(data) data += compress.flush() fp = BytesIO(data) r = HTTPResponse(fp, headers={"content-encoding": "gzip, gzip"}) assert r.data == b"foo" def test_read_multi_decoding_deflate_deflate(self) -> None: msg = b"foobarbaz" * 42 data = zlib.compress(zlib.compress(msg)) fp = BytesIO(data) r = HTTPResponse( fp, headers={"content-encoding": "deflate, deflate"}, preload_content=False ) assert r.read(3) == b"foo" assert r.read(3) == b"bar" assert r.read(3) == b"baz" assert r.read(9) == b"foobarbaz" assert r.read(9 * 3) == b"foobarbaz" * 3 assert r.read(9 * 37) == b"foobarbaz" * 37 assert r.read() == b"" def test_body_blob(self) -> None: resp = HTTPResponse(b"foo") assert resp.data == b"foo" assert resp.closed @pytest.mark.filterwarnings("ignore::pytest.PytestUnraisableExceptionWarning") def test_base_io(self) -> None: resp = BaseHTTPResponse( status=200, version=11, version_string="HTTP/1.1", reason=None, decode_content=False, request_url=None, ) assert not resp.closed assert not resp.readable() assert not resp.writable() with pytest.raises(NotImplementedError): resp.read() with pytest.raises(NotImplementedError): resp.close() def test_io(self, sock: socket.socket) -> None: fp = BytesIO(b"foo") resp = HTTPResponse(fp, preload_content=False) assert not resp.closed assert resp.readable() assert not resp.writable() with pytest.raises(IOError): resp.fileno() resp.close() assert resp.closed # Try closing with an `httplib.HTTPResponse`, because it has an # `isclosed` method. try: hlr = httplib.HTTPResponse(sock) resp2 = HTTPResponse(hlr, preload_content=False) assert not resp2.closed resp2.close() assert resp2.closed finally: hlr.close() # also try when only data is present. resp3 = HTTPResponse("foodata") with pytest.raises(IOError): resp3.fileno() resp3._fp = 2 # A corner case where _fp is present but doesn't have `closed`, # `isclosed`, or `fileno`. Unlikely, but possible. 
assert resp3.closed with pytest.raises(IOError): resp3.fileno() def test_io_closed_consistently_by_read(self, sock: socket.socket) -> None: try: hlr = httplib.HTTPResponse(sock) hlr.fp = BytesIO(b"foo") # type: ignore[assignment] hlr.chunked = 0 # type: ignore[assignment] hlr.length = 3 with HTTPResponse(hlr, preload_content=False) as resp: assert not resp.closed assert resp._fp is not None assert not resp._fp.isclosed() assert not is_fp_closed(resp._fp) assert not resp.isclosed() resp.read() assert resp.closed assert resp._fp.isclosed() assert is_fp_closed(resp._fp) assert resp.isclosed() finally: hlr.close() @pytest.mark.parametrize("read_amt", (None, 3)) @pytest.mark.parametrize("length_known", (True, False)) def test_io_closed_consistently_by_read1( self, sock: socket.socket, length_known: bool, read_amt: int | None ) -> None: with httplib.HTTPResponse(sock) as hlr: hlr.fp = BytesIO(b"foo") # type: ignore[assignment] hlr.chunked = 0 # type: ignore[assignment] hlr.length = 3 if length_known else None with HTTPResponse(hlr, preload_content=False) as resp: if length_known: resp.length_remaining = 3 assert not resp.closed assert resp._fp is not None assert not resp._fp.isclosed() assert not is_fp_closed(resp._fp) assert not resp.isclosed() resp.read1(read_amt) # If content length is unknown, IO is not closed until # the next read returning zero bytes. 
if not length_known: assert not resp.closed assert resp._fp is not None assert not resp._fp.isclosed() assert not is_fp_closed(resp._fp) assert not resp.isclosed() resp.read1(read_amt) assert resp.closed assert resp._fp.isclosed() assert is_fp_closed(resp._fp) assert resp.isclosed() @pytest.mark.parametrize("length_known", (True, False)) def test_io_not_closed_until_all_data_is_read( self, sock: socket.socket, length_known: bool ) -> None: with httplib.HTTPResponse(sock) as hlr: hlr.fp = BytesIO(b"foo") # type: ignore[assignment] hlr.chunked = 0 # type: ignore[assignment] length_remaining = 3 hlr.length = length_remaining if length_known else None with HTTPResponse(hlr, preload_content=False) as resp: if length_known: resp.length_remaining = length_remaining while length_remaining: assert not resp.closed assert resp._fp is not None assert not resp._fp.isclosed() assert not is_fp_closed(resp._fp) assert not resp.isclosed() data = resp.read(1) assert len(data) == 1 length_remaining -= 1 # If content length is unknown, IO is not closed until # the next read returning zero bytes. 
if not length_known: assert not resp.closed assert resp._fp is not None assert not resp._fp.isclosed() assert not is_fp_closed(resp._fp) assert not resp.isclosed() data = resp.read(1) assert len(data) == 0 assert resp.closed assert resp._fp.isclosed() # type: ignore[union-attr] assert is_fp_closed(resp._fp) assert resp.isclosed() @pytest.mark.parametrize("length_known", (True, False)) def test_io_not_closed_after_requesting_0_bytes( self, sock: socket.socket, length_known: bool ) -> None: with httplib.HTTPResponse(sock) as hlr: hlr.fp = BytesIO(b"foo") # type: ignore[assignment] hlr.chunked = 0 # type: ignore[assignment] length_remaining = 3 hlr.length = length_remaining if length_known else None with HTTPResponse(hlr, preload_content=False) as resp: if length_known: resp.length_remaining = length_remaining assert not resp.closed assert resp._fp is not None assert not resp._fp.isclosed() assert not is_fp_closed(resp._fp) assert not resp.isclosed() data = resp.read(0) assert data == b"" assert not resp.closed assert resp._fp is not None assert not resp._fp.isclosed() assert not is_fp_closed(resp._fp) assert not resp.isclosed() def test_io_bufferedreader(self) -> None: fp = BytesIO(b"foo") resp = HTTPResponse(fp, preload_content=False) br = BufferedReader(resp) # type: ignore[arg-type] assert br.read() == b"foo" br.close() assert resp.closed # HTTPResponse.read() by default closes the response # https://github.com/urllib3/urllib3/issues/1305 fp = BytesIO(b"hello\nworld") resp = HTTPResponse(fp, preload_content=False) with pytest.raises(ValueError, match="readline of closed file"): list(BufferedReader(resp)) # type: ignore[arg-type] b = b"fooandahalf" fp = BytesIO(b) resp = HTTPResponse(fp, preload_content=False) br = BufferedReader(resp, 5) # type: ignore[arg-type] br.read(1) # sets up the buffer, reading 5 assert len(fp.read()) == (len(b) - 5) # This is necessary to make sure the "no bytes left" part of `readinto` # gets tested. 
while not br.closed: br.read(5) def test_io_not_autoclose_bufferedreader(self) -> None: fp = BytesIO(b"hello\nworld") resp = HTTPResponse(fp, preload_content=False, auto_close=False) reader = BufferedReader(resp) # type: ignore[arg-type] assert list(reader) == [b"hello\n", b"world"] assert not reader.closed assert not resp.closed with pytest.raises(StopIteration): next(reader) reader.close() assert reader.closed assert resp.closed with pytest.raises(ValueError, match="readline of closed file"): next(reader) def test_io_textiowrapper(self) -> None: fp = BytesIO(b"\xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f") resp = HTTPResponse(fp, preload_content=False) br = TextIOWrapper(resp, encoding="utf8") # type: ignore[arg-type] assert br.read() == "äöüß" br.close() assert resp.closed # HTTPResponse.read() by default closes the response # https://github.com/urllib3/urllib3/issues/1305 fp = BytesIO( b"\xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f\n\xce\xb1\xce\xb2\xce\xb3\xce\xb4" ) resp = HTTPResponse(fp, preload_content=False) with pytest.raises(ValueError, match="I/O operation on closed file.?"): list(TextIOWrapper(resp)) # type: ignore[arg-type] def test_io_not_autoclose_textiowrapper(self) -> None: fp = BytesIO( b"\xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f\n\xce\xb1\xce\xb2\xce\xb3\xce\xb4" ) resp = HTTPResponse(fp, preload_content=False, auto_close=False) reader = TextIOWrapper(resp, encoding="utf8") # type: ignore[arg-type] assert list(reader) == ["äöüß\n", "αβγδ"] assert not reader.closed assert not resp.closed with pytest.raises(StopIteration): next(reader) reader.close() assert reader.closed assert resp.closed with pytest.raises(ValueError, match="I/O operation on closed file.?"): next(reader) def test_read_with_illegal_mix_decode_toggle(self) -> None: data = zlib.compress(b"foo") fp = BytesIO(data) resp = HTTPResponse( fp, headers={"content-encoding": "deflate"}, preload_content=False ) assert resp.read(1) == b"f" with pytest.raises( RuntimeError, match=( r"Calling read\(decode_content=False\) is 
not supported after " r"read\(decode_content=True\) was called" ), ): resp.read(1, decode_content=False) with pytest.raises( RuntimeError, match=( r"Calling read\(decode_content=False\) is not supported after " r"read\(decode_content=True\) was called" ), ): resp.read(decode_content=False) def test_read1_with_illegal_mix_decode_toggle(self) -> None: data = zlib.compress(b"foo") fp = BytesIO(data) resp = HTTPResponse( fp, headers={"content-encoding": "deflate"}, preload_content=False ) assert resp.read1(1) == b"f" with pytest.raises( RuntimeError, match=( r"Calling read1\(decode_content=False\) is not supported after " r"read1\(decode_content=True\) was called" ), ): resp.read1(1, decode_content=False) with pytest.raises( RuntimeError, match=( r"Calling read1\(decode_content=False\) is not supported after " r"read1\(decode_content=True\) was called" ), ): resp.read1(decode_content=False) def test_read_with_mix_decode_toggle(self) -> None: data = zlib.compress(b"foo") fp = BytesIO(data) resp = HTTPResponse( fp, headers={"content-encoding": "deflate"}, preload_content=False ) assert resp.read(2, decode_content=False) is not None assert resp.read(1, decode_content=True) == b"f" def test_streaming(self) -> None: fp = BytesIO(b"foo") resp = HTTPResponse(fp, preload_content=False) stream = resp.stream(2, decode_content=False) assert next(stream) == b"fo" assert next(stream) == b"o" with pytest.raises(StopIteration): next(stream) def test_streaming_tell(self) -> None: fp = BytesIO(b"foo") resp = HTTPResponse(fp, preload_content=False) stream = resp.stream(2, decode_content=False) position = 0 position += len(next(stream)) assert 2 == position assert position == resp.tell() position += len(next(stream)) assert 3 == position assert position == resp.tell() with pytest.raises(StopIteration): next(stream) def test_gzipped_streaming(self) -> None: compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS) data = compress.compress(b"foo") data += compress.flush() fp = 
BytesIO(data) resp = HTTPResponse( fp, headers={"content-encoding": "gzip"}, preload_content=False ) stream = resp.stream(2) assert next(stream) == b"fo" assert next(stream) == b"o" with pytest.raises(StopIteration): next(stream) def test_gzipped_streaming_tell(self) -> None: compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS) uncompressed_data = b"foo" data = compress.compress(uncompressed_data) data += compress.flush() fp = BytesIO(data) resp = HTTPResponse( fp, headers={"content-encoding": "gzip"}, preload_content=False ) stream = resp.stream() # Read everything payload = next(stream) assert payload == uncompressed_data assert len(data) == resp.tell() with pytest.raises(StopIteration): next(stream) def test_deflate_streaming_tell_intermediate_point(self) -> None: # Ensure that ``tell()`` returns the correct number of bytes when # part-way through streaming compressed content. NUMBER_OF_READS = 10 PART_SIZE = 64 class MockCompressedDataReading(BytesIO): """ A BytesIO-like reader returning ``payload`` in ``NUMBER_OF_READS`` calls to ``read``. """ def __init__(self, payload: bytes, payload_part_size: int) -> None: self.payloads = [ payload[i * payload_part_size : (i + 1) * payload_part_size] for i in range(NUMBER_OF_READS + 1) ] assert b"".join(self.payloads) == payload def read(self, _: int) -> bytes: # type: ignore[override] # Amount is unused. 
if len(self.payloads) > 0: return self.payloads.pop(0) return b"" def read1(self, amt: int) -> bytes: # type: ignore[override] return self.read(amt) uncompressed_data = zlib.decompress(ZLIB_PAYLOAD) payload_part_size = len(ZLIB_PAYLOAD) // NUMBER_OF_READS fp = MockCompressedDataReading(ZLIB_PAYLOAD, payload_part_size) resp = HTTPResponse( fp, headers={"content-encoding": "deflate"}, preload_content=False ) stream = resp.stream(PART_SIZE) parts_positions = [(part, resp.tell()) for part in stream] end_of_stream = resp.tell() with pytest.raises(StopIteration): next(stream) parts, positions = zip(*parts_positions) # Check that the payload is equal to the uncompressed data payload = b"".join(parts) assert uncompressed_data == payload # Check that the positions in the stream are correct # It is difficult to determine programmatically what the positions # returned by `tell` will be because the `HTTPResponse.read` method may # call socket `read` a couple of times if it doesn't have enough data # in the buffer or not call socket `read` at all if it has enough. All # this depends on the message, how it was compressed, what is # `PART_SIZE` and `payload_part_size`. # So for simplicity the expected values are hardcoded. 
expected = (92, 184, 230, 276, 322, 368, 414, 460) assert expected == positions # Check that the end of the stream is in the correct place assert len(ZLIB_PAYLOAD) == end_of_stream # Check that all parts have expected length expected_last_part_size = len(uncompressed_data) % PART_SIZE whole_parts = len(uncompressed_data) // PART_SIZE if expected_last_part_size == 0: expected_lengths = [PART_SIZE] * whole_parts else: expected_lengths = [PART_SIZE] * whole_parts + [expected_last_part_size] assert expected_lengths == [len(part) for part in parts] def test_deflate_streaming(self) -> None: data = zlib.compress(b"foo") fp = BytesIO(data) resp = HTTPResponse( fp, headers={"content-encoding": "deflate"}, preload_content=False ) stream = resp.stream(2) assert next(stream) == b"fo" assert next(stream) == b"o" with pytest.raises(StopIteration): next(stream) def test_deflate2_streaming(self) -> None: compress = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS) data = compress.compress(b"foo") data += compress.flush() fp = BytesIO(data) resp = HTTPResponse( fp, headers={"content-encoding": "deflate"}, preload_content=False ) stream = resp.stream(2) assert next(stream) == b"fo" assert next(stream) == b"o" with pytest.raises(StopIteration): next(stream) def test_empty_stream(self) -> None: fp = BytesIO(b"") resp = HTTPResponse(fp, preload_content=False) stream = resp.stream(2, decode_content=False) with pytest.raises(StopIteration): next(stream) @pytest.mark.parametrize( "preload_content, amt, read_meth", [ (True, None, "read"), (False, None, "read"), (False, 10 * 2**20, "read"), (False, None, "read1"), (False, 10 * 2**20, "read1"), ], ) @pytest.mark.limit_memory("25 MB") def test_buffer_memory_usage_decode_one_chunk( self, preload_content: bool, amt: int, read_meth: str ) -> None: content_length = 10 * 2**20 # 10 MiB fp = BytesIO(zlib.compress(bytes(content_length))) resp = HTTPResponse( fp, preload_content=preload_content, headers={"content-encoding": "deflate"}, ) data = 
resp.data if preload_content else getattr(resp, read_meth)(amt) assert len(data) == content_length @pytest.mark.parametrize( "preload_content, amt, read_meth", [ (True, None, "read"), (False, None, "read"), (False, 10 * 2**20, "read"), (False, None, "read1"), (False, 10 * 2**20, "read1"), ], ) @pytest.mark.limit_memory("10.5 MB") def test_buffer_memory_usage_no_decoding( self, preload_content: bool, amt: int, read_meth: str ) -> None: content_length = 10 * 2**20 # 10 MiB fp = BytesIO(bytes(content_length)) resp = HTTPResponse(fp, preload_content=preload_content, decode_content=False) data = resp.data if preload_content else getattr(resp, read_meth)(amt) assert len(data) == content_length def test_length_no_header(self) -> None: fp = BytesIO(b"12345") resp = HTTPResponse(fp, preload_content=False) assert resp.length_remaining is None def test_length_w_valid_header(self) -> None: headers = {"content-length": "5"} fp = BytesIO(b"12345") resp = HTTPResponse(fp, headers=headers, preload_content=False) assert resp.length_remaining == 5 def test_length_w_bad_header(self) -> None: garbage = {"content-length": "foo"} fp = BytesIO(b"12345") resp = HTTPResponse(fp, headers=garbage, preload_content=False) assert resp.length_remaining is None garbage["content-length"] = "-10" resp = HTTPResponse(fp, headers=garbage, preload_content=False) assert resp.length_remaining is None def test_length_when_chunked(self) -> None: # This is expressly forbidden in RFC 7230 sec 3.3.2 # We fall back to chunked in this case and try to # handle response ignoring content length. 
headers = {"content-length": "5", "transfer-encoding": "chunked"} fp = BytesIO(b"12345") resp = HTTPResponse(fp, headers=headers, preload_content=False) assert resp.length_remaining is None def test_length_with_multiple_content_lengths(self) -> None: headers = {"content-length": "5, 5, 5"} garbage = {"content-length": "5, 42"} fp = BytesIO(b"abcde") resp = HTTPResponse(fp, headers=headers, preload_content=False) assert resp.length_remaining == 5 with pytest.raises(InvalidHeader): HTTPResponse(fp, headers=garbage, preload_content=False) def test_length_after_read(self) -> None: headers = {"content-length": "5"} # Test no defined length fp = BytesIO(b"12345") resp = HTTPResponse(fp, preload_content=False) resp.read() assert resp.length_remaining is None # Test our update from content-length fp = BytesIO(b"12345") resp = HTTPResponse(fp, headers=headers, preload_content=False) resp.read() assert resp.length_remaining == 0 # Test partial read fp = BytesIO(b"12345") resp = HTTPResponse(fp, headers=headers, preload_content=False) data = resp.stream(2) next(data) assert resp.length_remaining == 3 def test_mock_httpresponse_stream(self) -> None: # Mock out a HTTP Request that does enough to make it through urllib3's # read() and close() calls, and also exhausts and underlying file # object. 
class MockHTTPRequest: def __init__(self) -> None: self.fp: BytesIO | None = None def read(self, amt: int) -> bytes: assert self.fp is not None data = self.fp.read(amt) if not data: self.fp = None return data def read1(self, amt: int) -> bytes: return self.read(1) def close(self) -> None: self.fp = None bio = BytesIO(b"foo") fp = MockHTTPRequest() fp.fp = bio resp = HTTPResponse(fp, preload_content=False) # type: ignore[arg-type] stream = resp.stream(2) assert next(stream) == b"fo" assert next(stream) == b"o" with pytest.raises(StopIteration): next(stream) def test_mock_transfer_encoding_chunked(self) -> None: stream = [b"fo", b"o", b"bar"] fp = MockChunkedEncodingResponse(stream) r = httplib.HTTPResponse(MockSock) # type: ignore[arg-type] r.fp = fp # type: ignore[assignment] resp = HTTPResponse( r, preload_content=False, headers={"transfer-encoding": "chunked"} ) for i, c in enumerate(resp.stream()): assert c == stream[i] def test_mock_gzipped_transfer_encoding_chunked_decoded(self) -> None: """Show that we can decode the gzipped and chunked body.""" def stream() -> typing.Generator[bytes, None, None]: # Set up a generator to chunk the gzipped body compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS) data = compress.compress(b"foobar") data += compress.flush() for i in range(0, len(data), 2): yield data[i : i + 2] fp = MockChunkedEncodingResponse(list(stream())) r = httplib.HTTPResponse(MockSock) # type: ignore[arg-type] r.fp = fp # type: ignore[assignment] headers = {"transfer-encoding": "chunked", "content-encoding": "gzip"} resp = HTTPResponse(r, preload_content=False, headers=headers) data = b"" for c in resp.stream(decode_content=True): data += c assert b"foobar" == data def test_mock_transfer_encoding_chunked_custom_read(self) -> None: stream = [b"foooo", b"bbbbaaaaar"] fp = MockChunkedEncodingResponse(stream) r = httplib.HTTPResponse(MockSock) # type: ignore[arg-type] r.fp = fp # type: ignore[assignment] r.chunked = True r.chunk_left = None 
resp = HTTPResponse( r, preload_content=False, headers={"transfer-encoding": "chunked"} ) expected_response = [b"fo", b"oo", b"o", b"bb", b"bb", b"aa", b"aa", b"ar"] response = list(resp.read_chunked(2)) assert expected_response == response @pytest.mark.parametrize("read_chunked_args", ((), (None,), (-1,))) def test_mock_transfer_encoding_chunked_unlmtd_read( self, read_chunked_args: tuple[typing.Any, ...] ) -> None: stream = [b"foooo", b"bbbbaaaaar"] fp = MockChunkedEncodingResponse(stream) r = httplib.HTTPResponse(MockSock) # type: ignore[arg-type] r.fp = fp # type: ignore[assignment] r.chunked = True r.chunk_left = None resp = HTTPResponse( r, preload_content=False, headers={"transfer-encoding": "chunked"} ) assert stream == list(resp.read_chunked(*read_chunked_args)) def test_read_not_chunked_response_as_chunks(self) -> None: fp = BytesIO(b"foo") resp = HTTPResponse(fp, preload_content=False) r = resp.read_chunked() with pytest.raises(ResponseNotChunked): next(r) def test_read_chunked_not_supported(self) -> None: fp = BytesIO(b"foo") resp = HTTPResponse( fp, preload_content=False, headers={"transfer-encoding": "chunked"} ) r = resp.read_chunked() with pytest.raises(BodyNotHttplibCompatible): next(r) def test_buggy_incomplete_read(self) -> None: # Simulate buggy versions of Python (<2.7.4) # See http://bugs.python.org/issue16298 content_length = 1337 fp = BytesIO(b"") resp = HTTPResponse( fp, headers={"content-length": str(content_length)}, preload_content=False, enforce_content_length=True, ) with pytest.raises(ProtocolError) as ctx: resp.read(3) orig_ex = ctx.value.args[1] assert isinstance(orig_ex, IncompleteRead) assert orig_ex.partial == 0 assert orig_ex.expected == content_length def test_incomplete_chunk(self) -> None: stream = [b"foooo", b"bbbbaaaaar"] fp = MockChunkedIncompleteRead(stream) r = httplib.HTTPResponse(MockSock) # type: ignore[arg-type] r.fp = fp # type: ignore[assignment] r.chunked = True r.chunk_left = None resp = HTTPResponse( r, 
preload_content=False, headers={"transfer-encoding": "chunked"} ) with pytest.raises(ProtocolError) as ctx: next(resp.read_chunked()) orig_ex = ctx.value.args[1] assert isinstance(orig_ex, httplib_IncompleteRead) def test_invalid_chunk_length(self) -> None: stream = [b"foooo", b"bbbbaaaaar"] fp = MockChunkedInvalidChunkLength(stream) r = httplib.HTTPResponse(MockSock) # type: ignore[arg-type] r.fp = fp # type: ignore[assignment] r.chunked = True r.chunk_left = None resp = HTTPResponse( r, preload_content=False, headers={"transfer-encoding": "chunked"} ) with pytest.raises(ProtocolError) as ctx: next(resp.read_chunked()) orig_ex = ctx.value.args[1] msg = ( "(\"Connection broken: InvalidChunkLength(got length b'ZZZ\\\\r\\\\n', 0 bytes read)\", " "InvalidChunkLength(got length b'ZZZ\\r\\n', 0 bytes read))" ) assert str(ctx.value) == msg assert isinstance(orig_ex, InvalidChunkLength) assert orig_ex.length == fp.BAD_LENGTH_LINE.encode() def test_truncated_before_chunk(self) -> None: stream = [b"foooo", b"bbbbaaaaar"] fp = MockChunkedNoChunks(stream) r = httplib.HTTPResponse(MockSock) # type: ignore[arg-type] r.fp = fp # type: ignore[assignment] r.chunked = True r.chunk_left = None resp = HTTPResponse( r, preload_content=False, headers={"transfer-encoding": "chunked"} ) with pytest.raises(ProtocolError) as ctx: next(resp.read_chunked()) assert str(ctx.value) == "Response ended prematurely" def test_chunked_response_without_crlf_on_end(self) -> None: stream = [b"foo", b"bar", b"baz"] fp = MockChunkedEncodingWithoutCRLFOnEnd(stream) r = httplib.HTTPResponse(MockSock) # type: ignore[arg-type] r.fp = fp # type: ignore[assignment] r.chunked = True r.chunk_left = None resp = HTTPResponse( r, preload_content=False, headers={"transfer-encoding": "chunked"} ) assert stream == list(resp.stream()) def test_chunked_response_with_extensions(self) -> None: stream = [b"foo", b"bar"] fp = MockChunkedEncodingWithExtensions(stream) r = httplib.HTTPResponse(MockSock) # type: 
ignore[arg-type] r.fp = fp # type: ignore[assignment] r.chunked = True r.chunk_left = None resp = HTTPResponse( r, preload_content=False, headers={"transfer-encoding": "chunked"} ) assert stream == list(resp.stream()) def test_chunked_head_response(self) -> None: r = httplib.HTTPResponse(MockSock, method="HEAD") # type: ignore[arg-type] r.chunked = True r.chunk_left = None resp = HTTPResponse( "", preload_content=False, headers={"transfer-encoding": "chunked"}, original_response=r, ) assert resp.chunked is True setattr(resp, "supports_chunked_reads", lambda: True) setattr(resp, "release_conn", mock.Mock()) for _ in resp.stream(): continue resp.release_conn.assert_called_once_with() # type: ignore[attr-defined] def test_get_case_insensitive_headers(self) -> None: headers = {"host": "example.com"} r = HTTPResponse(headers=headers) assert r.headers.get("host") == "example.com" assert r.headers.get("Host") == "example.com" def test_retries(self) -> None: fp = BytesIO(b"") resp = HTTPResponse(fp) assert resp.retries is None retry = Retry() resp = HTTPResponse(fp, retries=retry) assert resp.retries == retry def test_geturl(self) -> None: fp = BytesIO(b"") request_url = "https://example.com" resp = HTTPResponse(fp, request_url=request_url) assert resp.geturl() == request_url def test_url(self) -> None: fp = BytesIO(b"") request_url = "https://example.com" resp = HTTPResponse(fp, request_url=request_url) assert resp.url == request_url resp.url = "https://anotherurl.com" assert resp.url == "https://anotherurl.com" def test_geturl_retries(self) -> None: fp = BytesIO(b"") resp = HTTPResponse(fp, request_url="http://example.com") request_histories = ( RequestHistory( method="GET", url="http://example.com", error=None, status=301, redirect_location="https://example.com/", ), RequestHistory( method="GET", url="https://example.com/", error=None, status=301, redirect_location="https://www.example.com", ), ) retry = Retry(history=request_histories) resp = HTTPResponse(fp, 
retries=retry) assert resp.geturl() == "https://www.example.com" @pytest.mark.parametrize( ["payload", "expected_stream"], [ (b"", []), (b"\n", [b"\n"]), (b"\n\n\n", [b"\n", b"\n", b"\n"]), (b"abc\ndef", [b"abc\n", b"def"]), (b"Hello\nworld\n\n\n!", [b"Hello\n", b"world\n", b"\n", b"\n", b"!"]), ], ) def test__iter__(self, payload: bytes, expected_stream: list[bytes]) -> None: actual_stream = [] for chunk in HTTPResponse(BytesIO(payload), preload_content=False): actual_stream.append(chunk) assert actual_stream == expected_stream def test__iter__decode_content(self) -> None: def stream() -> typing.Generator[bytes, None, None]: # Set up a generator to chunk the gzipped body compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS) data = compress.compress(b"foo\nbar") data += compress.flush() for i in range(0, len(data), 2): yield data[i : i + 2] fp = MockChunkedEncodingResponse(list(stream())) r = httplib.HTTPResponse(MockSock) # type: ignore[arg-type] r.fp = fp # type: ignore[assignment] headers = {"transfer-encoding": "chunked", "content-encoding": "gzip"} resp = HTTPResponse(r, preload_content=False, headers=headers) data = b"" for c in resp: data += c assert b"foo\nbar" == data def test_non_timeout_ssl_error_on_read(self) -> None: mac_error = ssl.SSLError( "SSL routines", "ssl3_get_record", "decryption failed or bad record mac" ) @contextlib.contextmanager def make_bad_mac_fp() -> typing.Generator[BytesIO, None, None]: fp = BytesIO(b"") with mock.patch.object(fp, "read") as fp_read: # mac/decryption error fp_read.side_effect = mac_error yield fp with make_bad_mac_fp() as fp: with pytest.raises(SSLError) as e: HTTPResponse(fp) assert e.value.args[0] == mac_error with make_bad_mac_fp() as fp: resp = HTTPResponse(fp, preload_content=False) with pytest.raises(SSLError) as e: resp.read() assert e.value.args[0] == mac_error def test_unexpected_body(self) -> None: with pytest.raises(ProtocolError) as excinfo: fp = BytesIO(b"12345") headers = {"content-length": 
"5"} resp = HTTPResponse(fp, status=204, headers=headers) resp.read(16) assert "Response may not contain content" in str(excinfo.value) with pytest.raises(ProtocolError): fp = BytesIO(b"12345") headers = {"content-length": "0"} resp = HTTPResponse(fp, status=204, headers=headers) resp.read(16) assert "Response may not contain content" in str(excinfo.value) with pytest.raises(ProtocolError): fp = BytesIO(b"12345") resp = HTTPResponse(fp, status=204) resp.read(16) assert "Response may not contain content" in str(excinfo.value) class MockChunkedEncodingResponse: def __init__(self, content: list[bytes]) -> None: """ content: collection of str, each str is a chunk in response """ self.content = content self.index = 0 # This class iterates over self.content. self.closed = False self.cur_chunk = b"" self.chunks_exhausted = False def _encode_chunk(self, chunk: bytes) -> bytes: # In the general case, we can't decode the chunk to unicode length = f"{len(chunk):X}\r\n" return length.encode() + chunk + b"\r\n" def _pop_new_chunk(self) -> bytes: if self.chunks_exhausted: return b"" try: chunk = self.content[self.index] except IndexError: chunk = b"" self.chunks_exhausted = True else: self.index += 1 chunk = self._encode_chunk(chunk) if not isinstance(chunk, bytes): chunk = chunk.encode() assert isinstance(chunk, bytes) return chunk def pop_current_chunk(self, amt: int = -1, till_crlf: bool = False) -> bytes: if amt > 0 and till_crlf: raise ValueError("Can't specify amt and till_crlf.") if len(self.cur_chunk) <= 0: self.cur_chunk = self._pop_new_chunk() if till_crlf: try: i = self.cur_chunk.index(b"\r\n") except ValueError: # No CRLF in current chunk -- probably caused by encoder. 
self.cur_chunk = b"" return b"" else: chunk_part = self.cur_chunk[: i + 2] self.cur_chunk = self.cur_chunk[i + 2 :] return chunk_part elif amt <= -1: chunk_part = self.cur_chunk self.cur_chunk = b"" return chunk_part else: try: chunk_part = self.cur_chunk[:amt] except IndexError: chunk_part = self.cur_chunk self.cur_chunk = b"" else: self.cur_chunk = self.cur_chunk[amt:] return chunk_part def readline(self) -> bytes: return self.pop_current_chunk(till_crlf=True) def read(self, amt: int = -1) -> bytes: return self.pop_current_chunk(amt) def read1(self, amt: int = -1) -> bytes: return self.pop_current_chunk(amt) def flush(self) -> None: # Python 3 wants this method. pass def close(self) -> None: self.closed = True class MockChunkedIncompleteRead(MockChunkedEncodingResponse): def _encode_chunk(self, chunk: bytes) -> bytes: return f"9999\r\n{chunk.decode()}\r\n".encode() class MockChunkedInvalidChunkLength(MockChunkedEncodingResponse): BAD_LENGTH_LINE = "ZZZ\r\n" def _encode_chunk(self, chunk: bytes) -> bytes: return f"{self.BAD_LENGTH_LINE}{chunk.decode()}\r\n".encode() class MockChunkedEncodingWithoutCRLFOnEnd(MockChunkedEncodingResponse): def _encode_chunk(self, chunk: bytes) -> bytes: return "{:X}\r\n{}{}".format( len(chunk), chunk.decode(), "\r\n" if len(chunk) > 0 else "", ).encode() class MockChunkedEncodingWithExtensions(MockChunkedEncodingResponse): def _encode_chunk(self, chunk: bytes) -> bytes: return f"{len(chunk):X};asd=qwe\r\n{chunk.decode()}\r\n".encode() class MockChunkedNoChunks(MockChunkedEncodingResponse): def _encode_chunk(self, chunk: bytes) -> bytes: return b"" class MockSock: @classmethod def makefile(cls, *args: typing.Any, **kwargs: typing.Any) -> None: return test_ssltransport.py 0000644 00000051212 15025234504 0010734 0 ustar 00 from __future__ import annotations import platform import select import socket import ssl import threading import typing from unittest import mock import pytest from dummyserver.socketserver import DEFAULT_CA, 
DEFAULT_CERTS from dummyserver.testcase import SocketDummyServerTestCase, consume_socket from urllib3.util import ssl_ from urllib3.util.ssltransport import SSLTransport # consume_socket can iterate forever, we add timeouts to prevent halting. PER_TEST_TIMEOUT = 60 def server_client_ssl_contexts() -> tuple[ssl.SSLContext, ssl.SSLContext]: if hasattr(ssl, "PROTOCOL_TLS_SERVER"): server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) server_context.load_cert_chain(DEFAULT_CERTS["certfile"], DEFAULT_CERTS["keyfile"]) if hasattr(ssl, "PROTOCOL_TLS_CLIENT"): client_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) client_context.load_verify_locations(DEFAULT_CA) return server_context, client_context @typing.overload def sample_request(binary: typing.Literal[True] = ...) -> bytes: ... @typing.overload def sample_request(binary: typing.Literal[False]) -> str: ... def sample_request(binary: bool = True) -> bytes | str: request = ( b"GET http://www.testing.com/ HTTP/1.1\r\n" b"Host: www.testing.com\r\n" b"User-Agent: awesome-test\r\n" b"\r\n" ) return request if binary else request.decode("utf-8") def validate_request( provided_request: bytearray, binary: typing.Literal[False, True] = True ) -> None: assert provided_request is not None expected_request = sample_request(binary) assert provided_request == expected_request @typing.overload def sample_response(binary: typing.Literal[True] = ...) -> bytes: ... @typing.overload def sample_response(binary: typing.Literal[False]) -> str: ... @typing.overload def sample_response(binary: bool = ...) -> bytes | str: ... 
def sample_response(binary: bool = True) -> bytes | str: response = b"HTTP/1.1 200 OK\r\nContent-Length: 0\r\n\r\n" return response if binary else response.decode("utf-8") def validate_response( provided_response: bytes | bytearray | str, binary: bool = True ) -> None: assert provided_response is not None expected_response = sample_response(binary) assert provided_response == expected_response def validate_peercert(ssl_socket: SSLTransport) -> None: binary_cert = ssl_socket.getpeercert(binary_form=True) assert type(binary_cert) is bytes assert len(binary_cert) > 0 cert = ssl_socket.getpeercert() assert type(cert) is dict assert "serialNumber" in cert assert cert["serialNumber"] != "" class SingleTLSLayerTestCase(SocketDummyServerTestCase): """ Uses the SocketDummyServer to validate a single TLS layer can be established through the SSLTransport. """ @classmethod def setup_class(cls) -> None: cls.server_context, cls.client_context = server_client_ssl_contexts() def start_dummy_server( self, handler: typing.Callable[[socket.socket], None] | None = None, validate: bool = True, ) -> None: quit_event = threading.Event() def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] try: with self.server_context.wrap_socket(sock, server_side=True) as ssock: request = consume_socket( ssock, quit_event=quit_event, ) if not validate: return validate_request(request) ssock.send(sample_response()) except (ConnectionAbortedError, ConnectionResetError): return chosen_handler = handler if handler else socket_handler self._start_server(chosen_handler, quit_event=quit_event) @pytest.mark.timeout(PER_TEST_TIMEOUT) def test_start_closed_socket(self) -> None: """Errors generated from an unconnected socket should bubble up.""" sock = socket.socket(socket.AF_INET) context = ssl.create_default_context() sock.close() with pytest.raises(OSError): SSLTransport(sock, context) @pytest.mark.timeout(PER_TEST_TIMEOUT) def test_close_after_handshake(self) -> None: """Socket 
errors should be bubbled up""" self.start_dummy_server(validate=False) sock = socket.create_connection((self.host, self.port)) with SSLTransport( sock, self.client_context, server_hostname="localhost" ) as ssock: ssock.close() with pytest.raises(OSError): ssock.send(b"blaaargh") @pytest.mark.timeout(PER_TEST_TIMEOUT) def test_wrap_existing_socket(self) -> None: """Validates a single TLS layer can be established.""" self.start_dummy_server() sock = socket.create_connection((self.host, self.port)) with SSLTransport( sock, self.client_context, server_hostname="localhost" ) as ssock: assert ssock.version() is not None ssock.send(sample_request()) response = consume_socket(ssock) validate_response(response) @pytest.mark.timeout(PER_TEST_TIMEOUT) def test_unbuffered_text_makefile(self) -> None: self.start_dummy_server() sock = socket.create_connection((self.host, self.port)) with SSLTransport( sock, self.client_context, server_hostname="localhost" ) as ssock: with pytest.raises(ValueError): ssock.makefile("r", buffering=0) ssock.send(sample_request()) response = consume_socket(ssock) validate_response(response) @pytest.mark.timeout(PER_TEST_TIMEOUT) def test_unwrap_existing_socket(self) -> None: """ Validates we can break up the TLS layer A full request/response is sent over TLS, and later over plain text. """ def shutdown_handler(listener: socket.socket) -> None: with listener.accept()[0] as sock, self.server_context.wrap_socket( sock, server_side=True ) as ssl_sock: request = consume_socket(ssl_sock) validate_request(request) ssl_sock.sendall(sample_response()) with ssl_sock.unwrap() as unwrapped_sock: request = consume_socket(unwrapped_sock) validate_request(request) unwrapped_sock.sendall(sample_response()) self.start_dummy_server(shutdown_handler) with socket.create_connection((self.host, self.port)) as sock: ssock = SSLTransport(sock, self.client_context, server_hostname="localhost") # request/response over TLS. 
ssock.sendall(sample_request()) response = consume_socket(ssock) validate_response(response) # request/response over plaintext after unwrap. ssock.unwrap() sock.sendall(sample_request()) response = consume_socket(sock) validate_response(response) @pytest.mark.timeout(PER_TEST_TIMEOUT) def test_ssl_object_attributes(self) -> None: """Ensures common ssl attributes are exposed""" self.start_dummy_server() sock = socket.create_connection((self.host, self.port)) with SSLTransport( sock, self.client_context, server_hostname="localhost" ) as ssock: cipher = ssock.cipher() assert type(cipher) is tuple # No chosen protocol through ALPN or NPN. assert ssock.selected_alpn_protocol() is None assert ssock.selected_npn_protocol() is None shared_ciphers = ssock.shared_ciphers() # SSLContext.shared_ciphers() changed behavior completely in a patch version. # See: https://github.com/python/cpython/issues/96931 assert shared_ciphers is None or ( type(shared_ciphers) is list and len(shared_ciphers) > 0 ) assert ssock.compression() is None validate_peercert(ssock) ssock.send(sample_request()) response = consume_socket(ssock) validate_response(response) @pytest.mark.timeout(PER_TEST_TIMEOUT) def test_socket_object_attributes(self) -> None: """Ensures common socket attributes are exposed""" self.start_dummy_server() sock = socket.create_connection((self.host, self.port)) with SSLTransport( sock, self.client_context, server_hostname="localhost" ) as ssock: assert ssock.fileno() is not None test_timeout = 10 ssock.settimeout(test_timeout) assert ssock.gettimeout() == test_timeout assert ssock.socket.gettimeout() == test_timeout ssock.send(sample_request()) response = consume_socket(ssock) validate_response(response) class SocketProxyDummyServer(SocketDummyServerTestCase): """ Simulates a proxy that performs a simple I/O loop on client/server socket. 
""" def __init__( self, destination_server_host: str, destination_server_port: int ) -> None: self.destination_server_host = destination_server_host self.destination_server_port = destination_server_port self.server_ctx, _ = server_client_ssl_contexts() def start_proxy_handler(self) -> None: """ Socket handler for the proxy. Terminates the first TLS layer and tunnels any bytes needed for client <-> server communicatin. """ def proxy_handler(listener: socket.socket) -> None: sock = listener.accept()[0] with self.server_ctx.wrap_socket(sock, server_side=True) as client_sock: upstream_sock = socket.create_connection( (self.destination_server_host, self.destination_server_port) ) self._read_write_loop(client_sock, upstream_sock) upstream_sock.close() client_sock.close() self._start_server(proxy_handler) def _read_write_loop( self, client_sock: socket.socket, server_sock: socket.socket, chunks: int = 65536, ) -> None: inputs = [client_sock, server_sock] output = [client_sock, server_sock] while inputs: readable, writable, exception = select.select(inputs, output, inputs) if exception: # Error occurred with either of the sockets, time to # wrap up, parent func will close sockets. break for s in readable: read_socket, write_socket = None, None if s == client_sock: read_socket = client_sock write_socket = server_sock else: read_socket = server_sock write_socket = client_sock # Ensure buffer is not full before writing if write_socket in writable: try: b = read_socket.recv(chunks) if len(b) == 0: # One of the sockets has EOFed, we return to close # both. return write_socket.send(b) except ssl.SSLEOFError: # It's possible, depending on shutdown order, that we'll # try to use a socket that was closed between select # calls. return class TlsInTlsTestCase(SocketDummyServerTestCase): """ Creates a TLS in TLS tunnel by chaining a 'SocketProxyDummyServer' and a `SocketDummyServerTestCase`. 
Client will first connect to the proxy, who will then proxy any bytes send to the destination server. First TLS layer terminates at the proxy, second TLS layer terminates at the destination server. """ @classmethod def setup_class(cls) -> None: cls.server_context, cls.client_context = server_client_ssl_contexts() @classmethod def start_proxy_server(cls) -> None: # Proxy server will handle the first TLS connection and create a # connection to the destination server. cls.proxy_server = SocketProxyDummyServer(cls.host, cls.port) cls.proxy_server.start_proxy_handler() @classmethod def teardown_class(cls) -> None: if hasattr(cls, "proxy_server"): cls.proxy_server.teardown_class() super().teardown_class() @classmethod def start_destination_server(cls) -> None: """ Socket handler for the destination_server. Terminates the second TLS layer and send a basic HTTP response. """ def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] try: with cls.server_context.wrap_socket(sock, server_side=True) as ssock: request = consume_socket(ssock) validate_request(request) ssock.send(sample_response()) except (ssl.SSLEOFError, ssl.SSLZeroReturnError, OSError): return sock.close() cls._start_server(socket_handler) @pytest.mark.timeout(PER_TEST_TIMEOUT) def test_tls_in_tls_tunnel(self) -> None: """ Basic communication over the TLS in TLS tunnel. 
""" self.start_destination_server() self.start_proxy_server() sock = socket.create_connection( (self.proxy_server.host, self.proxy_server.port) ) with self.client_context.wrap_socket( sock, server_hostname="localhost" ) as proxy_sock: with SSLTransport( proxy_sock, self.client_context, server_hostname="localhost" ) as destination_sock: assert destination_sock.version() is not None destination_sock.send(sample_request()) response = consume_socket(destination_sock) validate_response(response) @pytest.mark.timeout(PER_TEST_TIMEOUT) def test_wrong_sni_hint(self) -> None: """ Provides a wrong sni hint to validate an exception is thrown. """ self.start_destination_server() self.start_proxy_server() sock = socket.create_connection( (self.proxy_server.host, self.proxy_server.port) ) with self.client_context.wrap_socket( sock, server_hostname="localhost" ) as proxy_sock: with pytest.raises(ssl.SSLCertVerificationError): SSLTransport( proxy_sock, self.client_context, server_hostname="veryverywrong" ) @pytest.mark.timeout(PER_TEST_TIMEOUT) @pytest.mark.parametrize("buffering", [None, 0]) def test_tls_in_tls_makefile_raw_rw_binary(self, buffering: int | None) -> None: """ Uses makefile with read, write and binary modes without buffering. """ self.start_destination_server() self.start_proxy_server() sock = socket.create_connection( (self.proxy_server.host, self.proxy_server.port) ) with self.client_context.wrap_socket( sock, server_hostname="localhost" ) as proxy_sock: with SSLTransport( proxy_sock, self.client_context, server_hostname="localhost" ) as destination_sock: file = destination_sock.makefile("rwb", buffering) file.write(sample_request()) # type: ignore[call-overload] file.flush() response = bytearray(65536) wrote = file.readinto(response) # type: ignore[union-attr] assert wrote is not None # Allocated response is bigger than the actual response, we # rtrim remaining x00 bytes. 
str_response = response.decode("utf-8").rstrip("\x00") validate_response(str_response, binary=False) file.close() @pytest.mark.skipif( platform.system() == "Windows", reason="Skipping windows due to text makefile support", ) @pytest.mark.timeout(PER_TEST_TIMEOUT) def test_tls_in_tls_makefile_rw_text(self) -> None: """ Creates a separate buffer for reading and writing using text mode and utf-8 encoding. """ self.start_destination_server() self.start_proxy_server() sock = socket.create_connection( (self.proxy_server.host, self.proxy_server.port) ) with self.client_context.wrap_socket( sock, server_hostname="localhost" ) as proxy_sock: with SSLTransport( proxy_sock, self.client_context, server_hostname="localhost" ) as destination_sock: read = destination_sock.makefile("r", encoding="utf-8") write = destination_sock.makefile("w", encoding="utf-8") write.write(sample_request(binary=False)) # type: ignore[arg-type, call-overload] write.flush() response = read.read() assert type(response) is str if "\r" not in response: # Carriage return will be removed when reading as a file on # some platforms. We add it before the comparison. assert type(response) is str response = response.replace("\n", "\r\n") validate_response(response, binary=False) @pytest.mark.timeout(PER_TEST_TIMEOUT) def test_tls_in_tls_recv_into_sendall(self) -> None: """ Valides recv_into and sendall also work as expected. Other tests are using recv/send. 
""" self.start_destination_server() self.start_proxy_server() sock = socket.create_connection( (self.proxy_server.host, self.proxy_server.port) ) with self.client_context.wrap_socket( sock, server_hostname="localhost" ) as proxy_sock: with SSLTransport( proxy_sock, self.client_context, server_hostname="localhost" ) as destination_sock: destination_sock.sendall(sample_request()) response = bytearray(65536) destination_sock.recv_into(response) str_response = response.decode("utf-8").rstrip("\x00") validate_response(str_response, binary=False) class TestSSLTransportWithMock: def test_constructor_params(self) -> None: server_hostname = "example-domain.com" sock = mock.Mock() context = mock.create_autospec(ssl_.SSLContext) ssl_transport = SSLTransport( sock, context, server_hostname=server_hostname, suppress_ragged_eofs=False ) context.wrap_bio.assert_called_with( mock.ANY, mock.ANY, server_hostname=server_hostname ) assert not ssl_transport.suppress_ragged_eofs def test_various_flags_errors(self) -> None: server_hostname = "example-domain.com" sock = mock.Mock() context = mock.create_autospec(ssl_.SSLContext) ssl_transport = SSLTransport( sock, context, server_hostname=server_hostname, suppress_ragged_eofs=False ) with pytest.raises(ValueError): ssl_transport.recv(flags=1) with pytest.raises(ValueError): ssl_transport.recv_into(bytearray(), flags=1) with pytest.raises(ValueError): ssl_transport.sendall(bytearray(), flags=1) with pytest.raises(ValueError): ssl_transport.send(None, flags=1) # type: ignore[arg-type] def test_makefile_wrong_mode_error(self) -> None: server_hostname = "example-domain.com" sock = mock.Mock() context = mock.create_autospec(ssl_.SSLContext) ssl_transport = SSLTransport( sock, context, server_hostname=server_hostname, suppress_ragged_eofs=False ) with pytest.raises(ValueError): ssl_transport.makefile(mode="x") def test_wrap_ssl_read_error(self) -> None: server_hostname = "example-domain.com" sock = mock.Mock() context = 
mock.create_autospec(ssl_.SSLContext) ssl_transport = SSLTransport( sock, context, server_hostname=server_hostname, suppress_ragged_eofs=False ) with mock.patch.object(ssl_transport, "_ssl_io_loop") as _ssl_io_loop: _ssl_io_loop.side_effect = ssl.SSLError() with pytest.raises(ssl.SSLError): ssl_transport._wrap_ssl_read(1) test_collections.py 0000644 00000033746 15025234504 0010510 0 ustar 00 from __future__ import annotations import typing import pytest from urllib3._collections import HTTPHeaderDict from urllib3._collections import RecentlyUsedContainer as Container class TestLRUContainer: def test_maxsize(self) -> None: d: Container[int, str] = Container(5) for i in range(5): d[i] = str(i) assert len(d) == 5 for i in range(5): assert d[i] == str(i) d[i + 1] = str(i + 1) assert len(d) == 5 assert 0 not in d assert (i + 1) in d def test_maxsize_0(self) -> None: d: Container[int, int] = Container(0) d[1] = 1 assert len(d) == 0 def test_expire(self) -> None: d: Container[int, str] = Container(5) for i in range(5): d[i] = str(i) for i in range(5): d.get(0) # Add one more entry d[5] = "5" # Check state assert list(d._container.keys()) == [2, 3, 4, 0, 5] def test_same_key(self) -> None: d: Container[str, int] = Container(5) for i in range(10): d["foo"] = i assert list(d._container.keys()) == ["foo"] assert len(d) == 1 def test_access_ordering(self) -> None: d: Container[int, bool] = Container(5) for i in range(10): d[i] = True # Keys should be ordered by access time assert list(d._container.keys()) == [5, 6, 7, 8, 9] new_order = [7, 8, 6, 9, 5] for k in new_order: d[k] assert list(d._container.keys()) == new_order def test_delete(self) -> None: d: Container[int, bool] = Container(5) for i in range(5): d[i] = True del d[0] assert 0 not in d d.pop(1) assert 1 not in d d.pop(1, None) def test_get(self) -> None: d: Container[int, bool | int] = Container(5) for i in range(5): d[i] = True r = d.get(4) assert r is True r = d.get(5) assert r is None r = d.get(5, 42) assert r == 
42 with pytest.raises(KeyError): d[5] def test_disposal(self) -> None: evicted_items: list[int] = [] def dispose_func(arg: int) -> None: # Save the evicted datum for inspection evicted_items.append(arg) d: Container[int, int] = Container(5, dispose_func=dispose_func) for i in range(5): d[i] = i assert list(d._container.keys()) == list(range(5)) assert evicted_items == [] # Nothing disposed d[5] = 5 assert list(d._container.keys()) == list(range(1, 6)) assert evicted_items == [0] del d[1] assert evicted_items == [0, 1] d.clear() assert evicted_items == [0, 1, 2, 3, 4, 5] def test_iter(self) -> None: d: Container[str, str] = Container() with pytest.raises(NotImplementedError): d.__iter__() class NonMappingHeaderContainer: def __init__(self, **kwargs: str) -> None: self._data = {} self._data.update(kwargs) def keys(self) -> typing.Iterator[str]: return iter(self._data) def __getitem__(self, key: str) -> str: return self._data[key] @pytest.fixture() def d() -> HTTPHeaderDict: header_dict = HTTPHeaderDict(Cookie="foo") header_dict.add("cookie", "bar") return header_dict class TestHTTPHeaderDict: def test_create_from_kwargs(self) -> None: h = HTTPHeaderDict(ab="1", cd="2", ef="3", gh="4") assert len(h) == 4 assert "ab" in h def test_setdefault(self) -> None: h = HTTPHeaderDict(a="1") assert h.setdefault("A", "3") == "1" assert h.setdefault("b", "2") == "2" assert h.setdefault("c") == "" assert h["c"] == "" assert h["b"] == "2" def test_create_from_dict(self) -> None: h = HTTPHeaderDict(dict(ab="1", cd="2", ef="3", gh="4")) assert len(h) == 4 assert "ab" in h def test_create_from_iterator(self) -> None: teststr = "urllib3ontherocks" h = HTTPHeaderDict((c, c * 5) for c in teststr) assert len(h) == len(set(teststr)) def test_create_from_list(self) -> None: headers = [ ("ab", "A"), ("cd", "B"), ("cookie", "C"), ("cookie", "D"), ("cookie", "E"), ] h = HTTPHeaderDict(headers) assert len(h) == 3 assert "ab" in h clist = h.getlist("cookie") assert len(clist) == 3 assert clist[0] 
== "C" assert clist[-1] == "E" def test_create_from_headerdict(self) -> None: headers = [ ("ab", "A"), ("cd", "B"), ("cookie", "C"), ("cookie", "D"), ("cookie", "E"), ] org = HTTPHeaderDict(headers) h = HTTPHeaderDict(org) assert len(h) == 3 assert "ab" in h clist = h.getlist("cookie") assert len(clist) == 3 assert clist[0] == "C" assert clist[-1] == "E" assert h is not org assert h == org def test_setitem(self, d: HTTPHeaderDict) -> None: d["Cookie"] = "foo" # The bytes value gets converted to str. The API is typed for str only, # but the implementation continues supports bytes. d[b"Cookie"] = "bar" # type: ignore[index] assert d["cookie"] == "bar" d["cookie"] = "with, comma" assert d.getlist("cookie") == ["with, comma"] def test_update(self, d: HTTPHeaderDict) -> None: d.update(dict(Cookie="foo")) assert d["cookie"] == "foo" d.update(dict(cookie="with, comma")) assert d.getlist("cookie") == ["with, comma"] def test_delitem(self, d: HTTPHeaderDict) -> None: del d["cookie"] assert "cookie" not in d assert "COOKIE" not in d def test_add_well_known_multiheader(self, d: HTTPHeaderDict) -> None: d.add("COOKIE", "asdf") assert d.getlist("cookie") == ["foo", "bar", "asdf"] assert d["cookie"] == "foo, bar, asdf" def test_add_comma_separated_multiheader(self, d: HTTPHeaderDict) -> None: d.add("bar", "foo") # The bytes value gets converted to str. The API is typed for str only, # but the implementation continues supports bytes. 
d.add(b"BAR", "bar") # type: ignore[arg-type] d.add("Bar", "asdf") assert d.getlist("bar") == ["foo", "bar", "asdf"] assert d["bar"] == "foo, bar, asdf" def test_extend_from_list(self, d: HTTPHeaderDict) -> None: d.extend([("set-cookie", "100"), ("set-cookie", "200"), ("set-cookie", "300")]) assert d["set-cookie"] == "100, 200, 300" def test_extend_from_dict(self, d: HTTPHeaderDict) -> None: d.extend(dict(cookie="asdf"), b="100") assert d["cookie"] == "foo, bar, asdf" assert d["b"] == "100" d.add("cookie", "with, comma") assert d.getlist("cookie") == ["foo", "bar", "asdf", "with, comma"] def test_extend_from_container(self, d: HTTPHeaderDict) -> None: h = NonMappingHeaderContainer(Cookie="foo", e="foofoo") d.extend(h) assert d["cookie"] == "foo, bar, foo" assert d["e"] == "foofoo" assert len(d) == 2 def test_header_repeat(self, d: HTTPHeaderDict) -> None: d["other-header"] = "hello" d.add("other-header", "world") assert list(d.items()) == [ ("Cookie", "foo"), ("Cookie", "bar"), ("other-header", "hello"), ("other-header", "world"), ] d.add("other-header", "!", combine=True) expected_results = [ ("Cookie", "foo"), ("Cookie", "bar"), ("other-header", "hello"), ("other-header", "world, !"), ] assert list(d.items()) == expected_results # make sure the values persist over copies assert list(d.copy().items()) == expected_results other_dict = HTTPHeaderDict() # we also need for extensions to properly maintain results other_dict.extend(d) assert list(other_dict.items()) == expected_results def test_extend_from_headerdict(self, d: HTTPHeaderDict) -> None: h = HTTPHeaderDict(Cookie="foo", e="foofoo") d.extend(h) assert d["cookie"] == "foo, bar, foo" assert d["e"] == "foofoo" assert len(d) == 2 @pytest.mark.parametrize("args", [(1, 2), (1, 2, 3, 4, 5)]) def test_extend_with_wrong_number_of_args_is_typeerror( self, d: HTTPHeaderDict, args: tuple[int, ...] 
) -> None: with pytest.raises( TypeError, match=r"extend\(\) takes at most 1 positional arguments" ): d.extend(*args) # type: ignore[arg-type] def test_copy(self, d: HTTPHeaderDict) -> None: h = d.copy() assert d is not h assert d == h def test_getlist(self, d: HTTPHeaderDict) -> None: assert d.getlist("cookie") == ["foo", "bar"] assert d.getlist("Cookie") == ["foo", "bar"] assert d.getlist("b") == [] d.add("b", "asdf") assert d.getlist("b") == ["asdf"] def test_getlist_after_copy(self, d: HTTPHeaderDict) -> None: assert d.getlist("cookie") == HTTPHeaderDict(d).getlist("cookie") def test_equal(self, d: HTTPHeaderDict) -> None: b = HTTPHeaderDict(cookie="foo, bar") c = NonMappingHeaderContainer(cookie="foo, bar") e = [("cookie", "foo, bar")] assert d == b assert d == c assert d == e assert d != 2 def test_not_equal(self, d: HTTPHeaderDict) -> None: b = HTTPHeaderDict(cookie="foo, bar") c = NonMappingHeaderContainer(cookie="foo, bar") e = [("cookie", "foo, bar")] assert not (d != b) assert not (d != c) assert not (d != e) assert d != 2 def test_pop(self, d: HTTPHeaderDict) -> None: key = "Cookie" a = d[key] b = d.pop(key) assert a == b assert key not in d with pytest.raises(KeyError): d.pop(key) dummy = object() assert dummy is d.pop(key, dummy) def test_discard(self, d: HTTPHeaderDict) -> None: d.discard("cookie") assert "cookie" not in d d.discard("cookie") def test_len(self, d: HTTPHeaderDict) -> None: assert len(d) == 1 d.add("cookie", "bla") d.add("asdf", "foo") # len determined by unique fieldnames assert len(d) == 2 def test_repr(self, d: HTTPHeaderDict) -> None: rep = "HTTPHeaderDict({'Cookie': 'foo, bar'})" assert repr(d) == rep def test_items(self, d: HTTPHeaderDict) -> None: items = d.items() assert len(items) == 2 assert list(items) == [ ("Cookie", "foo"), ("Cookie", "bar"), ] assert ("Cookie", "foo") in items assert ("Cookie", "bar") in items assert ("X-Some-Header", "foo") not in items assert ("Cookie", "not_present") not in items assert ("Cookie", 1) 
not in items # type: ignore[comparison-overlap] assert "Cookie" not in items # type: ignore[comparison-overlap] def test_dict_conversion(self, d: HTTPHeaderDict) -> None: # Also tested in connectionpool, needs to preserve case hdict = { "Content-Length": "0", "Content-type": "text/plain", "Server": "Hypercorn/1.2.3", } h = dict(HTTPHeaderDict(hdict).items()) assert hdict == h assert hdict == dict(HTTPHeaderDict(hdict)) def test_string_enforcement(self, d: HTTPHeaderDict) -> None: # This currently throws AttributeError on key.lower(), should # probably be something nicer with pytest.raises(Exception): d[3] = "5" # type: ignore[index] with pytest.raises(Exception): d.add(3, "4") # type: ignore[arg-type] with pytest.raises(Exception): del d[3] # type: ignore[arg-type] with pytest.raises(Exception): HTTPHeaderDict({3: 3}) # type: ignore[arg-type] def test_dunder_contains(self, d: HTTPHeaderDict) -> None: """ Test: HTTPHeaderDict.__contains__ returns True - for matched string objects - for case-similar string objects HTTPHeaderDict.__contains__ returns False - for non-similar strings - for non-strings, even if they are keys in the underlying datastructure """ assert "cookie" in d assert "CoOkIe" in d assert "Not a cookie" not in d marker = object() d._container[marker] = ["some", "strings"] # type: ignore[index] assert marker not in d assert marker in d._container def test_union(self, d: HTTPHeaderDict) -> None: to_merge = {"Cookie": "tim-tam"} result = d | to_merge assert result == HTTPHeaderDict({"Cookie": "foo, bar, tim-tam"}) assert to_merge == {"Cookie": "tim-tam"} assert d == HTTPHeaderDict({"Cookie": "foo, bar"}) def test_union_rhs(self, d: HTTPHeaderDict) -> None: to_merge = {"Cookie": "tim-tam"} result = to_merge | d assert result == HTTPHeaderDict({"Cookie": "tim-tam, foo, bar"}) assert to_merge == {"Cookie": "tim-tam"} assert d == HTTPHeaderDict({"Cookie": "foo, bar"}) def test_inplace_union(self, d: HTTPHeaderDict) -> None: to_merge = {"Cookie": "tim-tam"} d 
|= to_merge assert d == HTTPHeaderDict({"Cookie": "foo, bar, tim-tam"}) def test_union_with_unsupported_type(self, d: HTTPHeaderDict) -> None: with pytest.raises(TypeError, match="unsupported operand type.*'int'"): d | 42 with pytest.raises(TypeError, match="unsupported operand type.*'float'"): 3.14 | d def test_inplace_union_with_unsupported_type(self, d: HTTPHeaderDict) -> None: with pytest.raises(TypeError, match="unsupported operand type.*'NoneType'"): d |= None test_filepost.py 0000644 00000007247 15025234504 0010014 0 ustar 00 from __future__ import annotations import pytest from urllib3.fields import RequestField from urllib3.filepost import _TYPE_FIELDS, encode_multipart_formdata BOUNDARY = "!! test boundary !!" BOUNDARY_BYTES = BOUNDARY.encode() class TestMultipartEncoding: @pytest.mark.parametrize( "fields", [dict(k="v", k2="v2"), [("k", "v"), ("k2", "v2")]] ) def test_input_datastructures(self, fields: _TYPE_FIELDS) -> None: encoded, _ = encode_multipart_formdata(fields, boundary=BOUNDARY) assert encoded.count(BOUNDARY_BYTES) == 3 @pytest.mark.parametrize( "fields", [ [("k", "v"), ("k2", "v2")], [("k", b"v"), ("k2", b"v2")], [("k", b"v"), ("k2", "v2")], ], ) def test_field_encoding(self, fields: _TYPE_FIELDS) -> None: encoded, content_type = encode_multipart_formdata(fields, boundary=BOUNDARY) expected = ( b"--" + BOUNDARY_BYTES + b"\r\n" b'Content-Disposition: form-data; name="k"\r\n' b"\r\n" b"v\r\n" b"--" + BOUNDARY_BYTES + b"\r\n" b'Content-Disposition: form-data; name="k2"\r\n' b"\r\n" b"v2\r\n" b"--" + BOUNDARY_BYTES + b"--\r\n" ) assert encoded == expected assert content_type == "multipart/form-data; boundary=" + str(BOUNDARY) def test_filename(self) -> None: fields = [("k", ("somename", b"v"))] encoded, content_type = encode_multipart_formdata(fields, boundary=BOUNDARY) expected = ( b"--" + BOUNDARY_BYTES + b"\r\n" b'Content-Disposition: form-data; name="k"; filename="somename"\r\n' b"Content-Type: application/octet-stream\r\n" b"\r\n" b"v\r\n" 
b"--" + BOUNDARY_BYTES + b"--\r\n" ) assert encoded == expected assert content_type == "multipart/form-data; boundary=" + str(BOUNDARY) def test_textplain(self) -> None: fields = [("k", ("somefile.txt", b"v"))] encoded, content_type = encode_multipart_formdata(fields, boundary=BOUNDARY) expected = ( b"--" + BOUNDARY_BYTES + b"\r\n" b'Content-Disposition: form-data; name="k"; filename="somefile.txt"\r\n' b"Content-Type: text/plain\r\n" b"\r\n" b"v\r\n" b"--" + BOUNDARY_BYTES + b"--\r\n" ) assert encoded == expected assert content_type == "multipart/form-data; boundary=" + str(BOUNDARY) def test_explicit(self) -> None: fields = [("k", ("somefile.txt", b"v", "image/jpeg"))] encoded, content_type = encode_multipart_formdata(fields, boundary=BOUNDARY) expected = ( b"--" + BOUNDARY_BYTES + b"\r\n" b'Content-Disposition: form-data; name="k"; filename="somefile.txt"\r\n' b"Content-Type: image/jpeg\r\n" b"\r\n" b"v\r\n" b"--" + BOUNDARY_BYTES + b"--\r\n" ) assert encoded == expected assert content_type == "multipart/form-data; boundary=" + str(BOUNDARY) def test_request_fields(self) -> None: fields = [ RequestField( "k", b"v", filename="somefile.txt", headers={"Content-Type": "image/jpeg"}, ) ] encoded, content_type = encode_multipart_formdata(fields, boundary=BOUNDARY) expected = ( b"--" + BOUNDARY_BYTES + b"\r\n" b"Content-Type: image/jpeg\r\n" b"\r\n" b"v\r\n" b"--" + BOUNDARY_BYTES + b"--\r\n" ) assert encoded == expected test_compatibility.py 0000644 00000001264 15025234504 0011031 0 ustar 00 from __future__ import annotations import http.cookiejar import urllib from urllib3.response import HTTPResponse class TestCookiejar: def test_extract(self) -> None: request = urllib.request.Request("http://google.com") cookiejar = http.cookiejar.CookieJar() response = HTTPResponse() cookies = [ "sessionhash=abcabcabcabcab; path=/; HttpOnly", "lastvisit=1348253375; expires=Sat, 21-Sep-2050 18:49:35 GMT; path=/", ] for c in cookies: response.headers.add("set-cookie", c) 
cookiejar.extract_cookies(response, request) # type: ignore[arg-type] assert len(cookiejar) == len(cookies) test_exceptions.py 0000644 00000004164 15025234504 0010343 0 ustar 00 from __future__ import annotations import pickle from email.errors import MessageDefect from test import DUMMY_POOL import pytest from urllib3.connection import HTTPConnection from urllib3.connectionpool import HTTPConnectionPool from urllib3.exceptions import ( ClosedPoolError, ConnectTimeoutError, EmptyPoolError, HeaderParsingError, HostChangedError, HTTPError, LocationParseError, MaxRetryError, NewConnectionError, ReadTimeoutError, ) class TestPickle: @pytest.mark.parametrize( "exception", [ HTTPError(None), MaxRetryError(DUMMY_POOL, "", None), LocationParseError(""), ConnectTimeoutError(None), HTTPError("foo"), HTTPError("foo", IOError("foo")), MaxRetryError(HTTPConnectionPool("localhost"), "/", None), LocationParseError("fake location"), ClosedPoolError(HTTPConnectionPool("localhost"), ""), EmptyPoolError(HTTPConnectionPool("localhost"), ""), HostChangedError(HTTPConnectionPool("localhost"), "/", 0), ReadTimeoutError(HTTPConnectionPool("localhost"), "/", ""), ], ) def test_exceptions(self, exception: Exception) -> None: result = pickle.loads(pickle.dumps(exception)) assert isinstance(result, type(exception)) class TestFormat: def test_header_parsing_errors(self) -> None: hpe = HeaderParsingError([MessageDefect("defects")], "unparsed_data") assert "defects" in str(hpe) assert "unparsed_data" in str(hpe) class TestNewConnectionError: def test_pool_property_deprecation_warning(self) -> None: err = NewConnectionError(HTTPConnection("localhost"), "test") with pytest.warns(DeprecationWarning) as records: err_pool = err.pool assert err_pool is err.conn msg = ( "The 'pool' property is deprecated and will be removed " "in urllib3 v2.1.0. Use 'conn' instead." 
) record = records[0] assert isinstance(record.message, Warning) assert record.message.args[0] == msg test_retry.py 0000644 00000040506 15025234504 0007327 0 ustar 00 from __future__ import annotations import datetime from test import DUMMY_POOL from unittest import mock import pytest from urllib3.exceptions import ( ConnectTimeoutError, InvalidHeader, MaxRetryError, ReadTimeoutError, ResponseError, SSLError, ) from urllib3.response import HTTPResponse from urllib3.util.retry import RequestHistory, Retry class TestRetry: def test_string(self) -> None: """Retry string representation looks the way we expect""" retry = Retry() assert ( str(retry) == "Retry(total=10, connect=None, read=None, redirect=None, status=None)" ) for _ in range(3): retry = retry.increment(method="GET") assert ( str(retry) == "Retry(total=7, connect=None, read=None, redirect=None, status=None)" ) def test_retry_both_specified(self) -> None: """Total can win if it's lower than the connect value""" error = ConnectTimeoutError() retry = Retry(connect=3, total=2) retry = retry.increment(error=error) retry = retry.increment(error=error) with pytest.raises(MaxRetryError) as e: retry.increment(error=error) assert e.value.reason == error def test_retry_higher_total_loses(self) -> None: """A lower connect timeout than the total is honored""" error = ConnectTimeoutError() retry = Retry(connect=2, total=3) retry = retry.increment(error=error) retry = retry.increment(error=error) with pytest.raises(MaxRetryError): retry.increment(error=error) def test_retry_higher_total_loses_vs_read(self) -> None: """A lower read timeout than the total is honored""" error = ReadTimeoutError(DUMMY_POOL, "/", "read timed out") retry = Retry(read=2, total=3) retry = retry.increment(method="GET", error=error) retry = retry.increment(method="GET", error=error) with pytest.raises(MaxRetryError): retry.increment(method="GET", error=error) def test_retry_total_none(self) -> None: """if Total is none, connect error should take 
precedence""" error = ConnectTimeoutError() retry = Retry(connect=2, total=None) retry = retry.increment(error=error) retry = retry.increment(error=error) with pytest.raises(MaxRetryError) as e: retry.increment(error=error) assert e.value.reason == error timeout_error = ReadTimeoutError(DUMMY_POOL, "/", "read timed out") retry = Retry(connect=2, total=None) retry = retry.increment(method="GET", error=timeout_error) retry = retry.increment(method="GET", error=timeout_error) retry = retry.increment(method="GET", error=timeout_error) assert not retry.is_exhausted() def test_retry_default(self) -> None: """If no value is specified, should retry connects 3 times""" retry = Retry() assert retry.total == 10 assert retry.connect is None assert retry.read is None assert retry.redirect is None assert retry.other is None error = ConnectTimeoutError() retry = Retry(connect=1) retry = retry.increment(error=error) with pytest.raises(MaxRetryError): retry.increment(error=error) retry = Retry(connect=1) retry = retry.increment(error=error) assert not retry.is_exhausted() assert Retry(0).raise_on_redirect assert not Retry(False).raise_on_redirect def test_retry_other(self) -> None: """If an unexpected error is raised, should retry other times""" other_error = SSLError() retry = Retry(connect=1) retry = retry.increment(error=other_error) retry = retry.increment(error=other_error) assert not retry.is_exhausted() retry = Retry(other=1) retry = retry.increment(error=other_error) with pytest.raises(MaxRetryError) as e: retry.increment(error=other_error) assert e.value.reason == other_error def test_retry_read_zero(self) -> None: """No second chances on read timeouts, by default""" error = ReadTimeoutError(DUMMY_POOL, "/", "read timed out") retry = Retry(read=0) with pytest.raises(MaxRetryError) as e: retry.increment(method="GET", error=error) assert e.value.reason == error def test_status_counter(self) -> None: resp = HTTPResponse(status=400) retry = Retry(status=2) retry = 
retry.increment(response=resp) retry = retry.increment(response=resp) msg = ResponseError.SPECIFIC_ERROR.format(status_code=400) with pytest.raises(MaxRetryError, match=msg): retry.increment(response=resp) def test_backoff(self) -> None: """Backoff is computed correctly""" max_backoff = Retry.DEFAULT_BACKOFF_MAX retry = Retry(total=100, backoff_factor=0.2) assert retry.get_backoff_time() == 0 # First request retry = retry.increment(method="GET") assert retry.get_backoff_time() == 0 # First retry retry = retry.increment(method="GET") assert retry.backoff_factor == 0.2 assert retry.total == 98 assert retry.get_backoff_time() == 0.4 # Start backoff retry = retry.increment(method="GET") assert retry.get_backoff_time() == 0.8 retry = retry.increment(method="GET") assert retry.get_backoff_time() == 1.6 for _ in range(10): retry = retry.increment(method="GET") assert retry.get_backoff_time() == max_backoff def test_configurable_backoff_max(self) -> None: """Configurable backoff is computed correctly""" max_backoff = 1 retry = Retry(total=100, backoff_factor=0.2, backoff_max=max_backoff) assert retry.get_backoff_time() == 0 # First request retry = retry.increment(method="GET") assert retry.get_backoff_time() == 0 # First retry retry = retry.increment(method="GET") assert retry.backoff_factor == 0.2 assert retry.total == 98 assert retry.get_backoff_time() == 0.4 # Start backoff retry = retry.increment(method="GET") assert retry.get_backoff_time() == 0.8 retry = retry.increment(method="GET") assert retry.get_backoff_time() == max_backoff retry = retry.increment(method="GET") assert retry.get_backoff_time() == max_backoff def test_backoff_jitter(self) -> None: """Backoff with jitter is computed correctly""" max_backoff = 1 jitter = 0.4 retry = Retry( total=100, backoff_factor=0.2, backoff_max=max_backoff, backoff_jitter=jitter, ) assert retry.get_backoff_time() == 0 # First request retry = retry.increment(method="GET") assert retry.get_backoff_time() == 0 # First retry retry 
= retry.increment(method="GET") assert retry.backoff_factor == 0.2 assert retry.total == 98 assert 0.4 <= retry.get_backoff_time() <= 0.8 # Start backoff retry = retry.increment(method="GET") assert 0.8 <= retry.get_backoff_time() <= max_backoff retry = retry.increment(method="GET") assert retry.get_backoff_time() == max_backoff retry = retry.increment(method="GET") assert retry.get_backoff_time() == max_backoff def test_zero_backoff(self) -> None: retry = Retry() assert retry.get_backoff_time() == 0 retry = retry.increment(method="GET") retry = retry.increment(method="GET") assert retry.get_backoff_time() == 0 def test_backoff_reset_after_redirect(self) -> None: retry = Retry(total=100, redirect=5, backoff_factor=0.2) retry = retry.increment(method="GET") retry = retry.increment(method="GET") assert retry.get_backoff_time() == 0.4 redirect_response = HTTPResponse(status=302, headers={"location": "test"}) retry = retry.increment(method="GET", response=redirect_response) assert retry.get_backoff_time() == 0 retry = retry.increment(method="GET") retry = retry.increment(method="GET") assert retry.get_backoff_time() == 0.4 def test_sleep(self) -> None: # sleep a very small amount of time so our code coverage is happy retry = Retry(backoff_factor=0.0001) retry = retry.increment(method="GET") retry = retry.increment(method="GET") retry.sleep() def test_status_forcelist(self) -> None: retry = Retry(status_forcelist=range(500, 600)) assert not retry.is_retry("GET", status_code=200) assert not retry.is_retry("GET", status_code=400) assert retry.is_retry("GET", status_code=500) retry = Retry(total=1, status_forcelist=[418]) assert not retry.is_retry("GET", status_code=400) assert retry.is_retry("GET", status_code=418) # String status codes are not matched. 
retry = Retry(total=1, status_forcelist=["418"]) # type: ignore[list-item] assert not retry.is_retry("GET", status_code=418) def test_allowed_methods_with_status_forcelist(self) -> None: # Falsey allowed_methods means to retry on any method. retry = Retry(status_forcelist=[500], allowed_methods=None) assert retry.is_retry("GET", status_code=500) assert retry.is_retry("POST", status_code=500) # Criteria of allowed_methods and status_forcelist are ANDed. retry = Retry(status_forcelist=[500], allowed_methods=["POST"]) assert not retry.is_retry("GET", status_code=500) assert retry.is_retry("POST", status_code=500) def test_exhausted(self) -> None: assert not Retry(0).is_exhausted() assert Retry(-1).is_exhausted() assert Retry(1).increment(method="GET").total == 0 @pytest.mark.parametrize("total", [-1, 0]) def test_disabled(self, total: int) -> None: with pytest.raises(MaxRetryError): Retry(total).increment(method="GET") def test_error_message(self) -> None: retry = Retry(total=0) with pytest.raises(MaxRetryError, match="read timed out") as e: retry = retry.increment( method="GET", error=ReadTimeoutError(DUMMY_POOL, "/", "read timed out") ) assert "Caused by redirect" not in str(e.value) retry = Retry(total=1) retry = retry.increment("POST", "/") with pytest.raises(MaxRetryError, match=ResponseError.GENERIC_ERROR) as e: retry = retry.increment("POST", "/") assert "Caused by redirect" not in str(e.value) assert isinstance(e.value.reason, ResponseError) retry = Retry(total=1) response = HTTPResponse(status=500) msg = ResponseError.SPECIFIC_ERROR.format(status_code=500) retry = retry.increment("POST", "/", response=response) with pytest.raises(MaxRetryError, match=msg) as e: retry = retry.increment("POST", "/", response=response) assert "Caused by redirect" not in str(e.value) retry = Retry(connect=1) retry = retry.increment(error=ConnectTimeoutError("conntimeout")) with pytest.raises(MaxRetryError, match="conntimeout") as e: retry = 
retry.increment(error=ConnectTimeoutError("conntimeout")) assert "Caused by redirect" not in str(e.value) def test_history(self) -> None: retry = Retry(total=10, allowed_methods=frozenset(["GET", "POST"])) assert retry.history == tuple() connection_error = ConnectTimeoutError("conntimeout") retry = retry.increment("GET", "/test1", None, connection_error) test_history1 = (RequestHistory("GET", "/test1", connection_error, None, None),) assert retry.history == test_history1 read_error = ReadTimeoutError(DUMMY_POOL, "/test2", "read timed out") retry = retry.increment("POST", "/test2", None, read_error) test_history2 = ( RequestHistory("GET", "/test1", connection_error, None, None), RequestHistory("POST", "/test2", read_error, None, None), ) assert retry.history == test_history2 response = HTTPResponse(status=500) retry = retry.increment("GET", "/test3", response, None) test_history3 = ( RequestHistory("GET", "/test1", connection_error, None, None), RequestHistory("POST", "/test2", read_error, None, None), RequestHistory("GET", "/test3", None, 500, None), ) assert retry.history == test_history3 def test_retry_method_not_allowed(self) -> None: error = ReadTimeoutError(DUMMY_POOL, "/", "read timed out") retry = Retry() with pytest.raises(ReadTimeoutError): retry.increment(method="POST", error=error) def test_retry_default_remove_headers_on_redirect(self) -> None: retry = Retry() assert retry.remove_headers_on_redirect == { "authorization", "proxy-authorization", "cookie", } def test_retry_set_remove_headers_on_redirect(self) -> None: retry = Retry(remove_headers_on_redirect=["X-API-Secret"]) assert retry.remove_headers_on_redirect == {"x-api-secret"} @pytest.mark.parametrize("value", ["-1", "+1", "1.0", "\xb2"]) # \xb2 = ^2 def test_parse_retry_after_invalid(self, value: str) -> None: retry = Retry() with pytest.raises(InvalidHeader): retry.parse_retry_after(value) @pytest.mark.parametrize( "value, expected", [("0", 0), ("1000", 1000), ("\t42 ", 42)] ) def 
test_parse_retry_after(self, value: str, expected: int) -> None: retry = Retry() assert retry.parse_retry_after(value) == expected @pytest.mark.parametrize("respect_retry_after_header", [True, False]) def test_respect_retry_after_header_propagated( self, respect_retry_after_header: bool ) -> None: retry = Retry(respect_retry_after_header=respect_retry_after_header) new_retry = retry.new() assert new_retry.respect_retry_after_header == respect_retry_after_header @pytest.mark.parametrize( "retry_after_header,respect_retry_after_header,sleep_duration", [ ("3600", True, 3600), ("3600", False, None), # Will sleep due to header is 1 hour in future ("Mon, 3 Jun 2019 12:00:00 UTC", True, 3600), # Won't sleep due to not respecting header ("Mon, 3 Jun 2019 12:00:00 UTC", False, None), # Won't sleep due to current time reached ("Mon, 3 Jun 2019 11:00:00 UTC", True, None), # Won't sleep due to current time reached + not respecting header ("Mon, 3 Jun 2019 11:00:00 UTC", False, None), # Handle all the formats in RFC 7231 Section 7.1.1.1 ("Mon, 03 Jun 2019 11:30:12 GMT", True, 1812), ("Monday, 03-Jun-19 11:30:12 GMT", True, 1812), # Assume that datetimes without a timezone are in UTC per RFC 7231 ("Mon Jun 3 11:30:12 2019", True, 1812), ], ) @pytest.mark.parametrize( "stub_timezone", [ "UTC", "Asia/Jerusalem", None, ], indirect=True, ) @pytest.mark.usefixtures("stub_timezone") def test_respect_retry_after_header_sleep( self, retry_after_header: str, respect_retry_after_header: bool, sleep_duration: int | None, ) -> None: retry = Retry(respect_retry_after_header=respect_retry_after_header) with mock.patch( "time.time", return_value=datetime.datetime( 2019, 6, 3, 11, tzinfo=datetime.timezone.utc ).timestamp(), ), mock.patch("time.sleep") as sleep_mock: # for the default behavior, it must be in RETRY_AFTER_STATUS_CODES response = HTTPResponse( status=503, headers={"Retry-After": retry_after_header} ) retry.sleep(response) # The expected behavior is that we'll only sleep if 
respecting # this header (since we won't have any backoff sleep attempts) if respect_retry_after_header and sleep_duration is not None: sleep_mock.assert_called_with(sleep_duration) else: sleep_mock.assert_not_called() tz_stub.py 0000644 00000002243 15025234504 0006611 0 ustar 00 from __future__ import annotations import datetime import os import time import typing from contextlib import contextmanager import pytest try: import zoneinfo except ImportError: # Python < 3.9 from backports import zoneinfo # type: ignore[no-redef] @contextmanager def stub_timezone_ctx(tzname: str | None) -> typing.Generator[None, None, None]: """ Switch to a locally-known timezone specified by `tzname`. On exit, restore the previous timezone. If `tzname` is `None`, do nothing. """ if tzname is None: yield return # Only supported on Unix if not hasattr(time, "tzset"): pytest.skip("Timezone patching is not supported") # Make sure the new timezone exists try: zoneinfo.ZoneInfo(tzname) except zoneinfo.ZoneInfoNotFoundError: raise ValueError(f"Invalid timezone specified: {tzname!r}") # Get the current timezone old_tzname = datetime.datetime.now().astimezone().tzname() if old_tzname is None: raise OSError("Cannot determine current timezone") os.environ["TZ"] = tzname time.tzset() yield os.environ["TZ"] = old_tzname time.tzset() test_util.py 0000644 00000125366 15025234504 0007147 0 ustar 00 from __future__ import annotations import io import logging import socket import ssl import sys import typing import warnings from itertools import chain from test import ImportBlocker, ModuleStash, notBrotli, notZstd, onlyBrotli, onlyZstd from unittest import mock from unittest.mock import MagicMock, Mock, patch from urllib.parse import urlparse import pytest from urllib3 import add_stderr_logger, disable_warnings from urllib3.connection import ProxyConfig from urllib3.exceptions import ( InsecureRequestWarning, LocationParseError, TimeoutStateError, UnrewindableBodyError, ) from urllib3.util import 
is_fp_closed from urllib3.util.connection import _has_ipv6, allowed_gai_family, create_connection from urllib3.util.proxy import connection_requires_http_tunnel from urllib3.util.request import _FAILEDTELL, make_headers, rewind_body from urllib3.util.response import assert_header_parsing from urllib3.util.ssl_ import ( _TYPE_VERSION_INFO, _is_has_never_check_common_name_reliable, resolve_cert_reqs, resolve_ssl_version, ssl_wrap_socket, ) from urllib3.util.timeout import _DEFAULT_TIMEOUT, Timeout from urllib3.util.url import Url, _encode_invalid_chars, parse_url from urllib3.util.util import to_bytes, to_str from . import clear_warnings # This number represents a time in seconds, it doesn't mean anything in # isolation. Setting to a high-ish value to avoid conflicts with the smaller # numbers used for timeouts TIMEOUT_EPOCH = 1000 class TestUtil: url_host_map = [ # Hosts ("http://google.com/mail", ("http", "google.com", None)), ("http://google.com/mail/", ("http", "google.com", None)), ("google.com/mail", ("http", "google.com", None)), ("http://google.com/", ("http", "google.com", None)), ("http://google.com", ("http", "google.com", None)), ("http://www.google.com", ("http", "www.google.com", None)), ("http://mail.google.com", ("http", "mail.google.com", None)), ("http://google.com:8000/mail/", ("http", "google.com", 8000)), ("http://google.com:8000", ("http", "google.com", 8000)), ("https://google.com", ("https", "google.com", None)), ("https://google.com:8000", ("https", "google.com", 8000)), ("http://user:password@127.0.0.1:1234", ("http", "127.0.0.1", 1234)), ("http://google.com/foo=http://bar:42/baz", ("http", "google.com", None)), ("http://google.com?foo=http://bar:42/baz", ("http", "google.com", None)), ("http://google.com#foo=http://bar:42/baz", ("http", "google.com", None)), # IPv4 ("173.194.35.7", ("http", "173.194.35.7", None)), ("http://173.194.35.7", ("http", "173.194.35.7", None)), ("http://173.194.35.7/test", ("http", "173.194.35.7", None)), 
("http://173.194.35.7:80", ("http", "173.194.35.7", 80)), ("http://173.194.35.7:80/test", ("http", "173.194.35.7", 80)), # IPv6 ("[2a00:1450:4001:c01::67]", ("http", "[2a00:1450:4001:c01::67]", None)), ("http://[2a00:1450:4001:c01::67]", ("http", "[2a00:1450:4001:c01::67]", None)), ( "http://[2a00:1450:4001:c01::67]/test", ("http", "[2a00:1450:4001:c01::67]", None), ), ( "http://[2a00:1450:4001:c01::67]:80", ("http", "[2a00:1450:4001:c01::67]", 80), ), ( "http://[2a00:1450:4001:c01::67]:80/test", ("http", "[2a00:1450:4001:c01::67]", 80), ), # More IPv6 from http://www.ietf.org/rfc/rfc2732.txt ( "http://[fedc:ba98:7654:3210:fedc:ba98:7654:3210]:8000/index.html", ("http", "[fedc:ba98:7654:3210:fedc:ba98:7654:3210]", 8000), ), ( "http://[1080:0:0:0:8:800:200c:417a]/index.html", ("http", "[1080:0:0:0:8:800:200c:417a]", None), ), ("http://[3ffe:2a00:100:7031::1]", ("http", "[3ffe:2a00:100:7031::1]", None)), ( "http://[1080::8:800:200c:417a]/foo", ("http", "[1080::8:800:200c:417a]", None), ), ("http://[::192.9.5.5]/ipng", ("http", "[::192.9.5.5]", None)), ( "http://[::ffff:129.144.52.38]:42/index.html", ("http", "[::ffff:129.144.52.38]", 42), ), ( "http://[2010:836b:4179::836b:4179]", ("http", "[2010:836b:4179::836b:4179]", None), ), # Scoped IPv6 (with ZoneID), both RFC 6874 compliant and not. 
("http://[a::b%25zone]", ("http", "[a::b%zone]", None)), ("http://[a::b%zone]", ("http", "[a::b%zone]", None)), # Hosts ("HTTP://GOOGLE.COM/mail/", ("http", "google.com", None)), ("GOogle.COM/mail", ("http", "google.com", None)), ("HTTP://GoOgLe.CoM:8000/mail/", ("http", "google.com", 8000)), ("HTTP://user:password@EXAMPLE.COM:1234", ("http", "example.com", 1234)), ("173.194.35.7", ("http", "173.194.35.7", None)), ("HTTP://173.194.35.7", ("http", "173.194.35.7", None)), ( "HTTP://[2a00:1450:4001:c01::67]:80/test", ("http", "[2a00:1450:4001:c01::67]", 80), ), ( "HTTP://[FEDC:BA98:7654:3210:FEDC:BA98:7654:3210]:8000/index.html", ("http", "[fedc:ba98:7654:3210:fedc:ba98:7654:3210]", 8000), ), ( "HTTPS://[1080:0:0:0:8:800:200c:417A]/index.html", ("https", "[1080:0:0:0:8:800:200c:417a]", None), ), ("abOut://eXamPlE.com?info=1", ("about", "eXamPlE.com", None)), ( "http+UNIX://%2fvar%2frun%2fSOCKET/path", ("http+unix", "%2fvar%2frun%2fSOCKET", None), ), ] @pytest.mark.parametrize(["url", "scheme_host_port"], url_host_map) def test_scheme_host_port( self, url: str, scheme_host_port: tuple[str, str, int | None] ) -> None: parsed_url = parse_url(url) scheme, host, port = scheme_host_port assert (parsed_url.scheme or "http") == scheme assert parsed_url.hostname == parsed_url.host == host assert parsed_url.port == port def test_encode_invalid_chars_none(self) -> None: assert _encode_invalid_chars(None, set()) is None @pytest.mark.parametrize( "url", [ "http://google.com:foo", "http://::1/", "http://::1:80/", "http://google.com:-80", "http://google.com:65536", "http://google.com:\xb2\xb2", # \xb2 = ^2 # Invalid IDNA labels "http://\uD7FF.com", "http://❤️", # Unicode surrogates "http://\uD800.com", "http://\uDC00.com", ], ) def test_invalid_url(self, url: str) -> None: with pytest.raises(LocationParseError): parse_url(url) @pytest.mark.parametrize( "url, expected_normalized_url", [ ("HTTP://GOOGLE.COM/MAIL/", "http://google.com/MAIL/"), ( 
"http://user@domain.com:password@example.com/~tilde@?@", "http://user%40domain.com:password@example.com/~tilde@?@", ), ( "HTTP://JeremyCline:Hunter2@Example.com:8080/", "http://JeremyCline:Hunter2@example.com:8080/", ), ("HTTPS://Example.Com/?Key=Value", "https://example.com/?Key=Value"), ("Https://Example.Com/#Fragment", "https://example.com/#Fragment"), # IPv6 addresses with zone IDs. Both RFC 6874 (%25) as well as # non-standard (unquoted %) variants. ("[::1%zone]", "[::1%zone]"), ("[::1%25zone]", "[::1%zone]"), ("[::1%25]", "[::1%25]"), ("[::Ff%etH0%Ff]/%ab%Af", "[::ff%etH0%FF]/%AB%AF"), ( "http://user:pass@[AaAa::Ff%25etH0%Ff]/%ab%Af", "http://user:pass@[aaaa::ff%etH0%FF]/%AB%AF", ), # Invalid characters for the query/fragment getting encoded ( 'http://google.com/p[]?parameter[]="hello"#fragment#', "http://google.com/p%5B%5D?parameter%5B%5D=%22hello%22#fragment%23", ), # Percent encoding isn't applied twice despite '%' being invalid # but the percent encoding is still normalized. ( "http://google.com/p%5B%5d?parameter%5b%5D=%22hello%22#fragment%23", "http://google.com/p%5B%5D?parameter%5B%5D=%22hello%22#fragment%23", ), ], ) def test_parse_url_normalization( self, url: str, expected_normalized_url: str ) -> None: """Assert parse_url normalizes the scheme/host, and only the scheme/host""" actual_normalized_url = parse_url(url).url assert actual_normalized_url == expected_normalized_url @pytest.mark.parametrize("char", [chr(i) for i in range(0x00, 0x21)] + ["\x7F"]) def test_control_characters_are_percent_encoded(self, char: str) -> None: percent_char = "%" + (hex(ord(char))[2:].zfill(2).upper()) url = parse_url( f"http://user{char}@example.com/path{char}?query{char}#fragment{char}" ) assert url == Url( "http", auth="user" + percent_char, host="example.com", path="/path" + percent_char, query="query" + percent_char, fragment="fragment" + percent_char, ) parse_url_host_map = [ ("http://google.com/mail", Url("http", host="google.com", path="/mail")), 
("http://google.com/mail/", Url("http", host="google.com", path="/mail/")), ("http://google.com/mail", Url("http", host="google.com", path="mail")), ("google.com/mail", Url(host="google.com", path="/mail")), ("http://google.com/", Url("http", host="google.com", path="/")), ("http://google.com", Url("http", host="google.com")), ("http://google.com?foo", Url("http", host="google.com", path="", query="foo")), # Path/query/fragment ("", Url()), ("/", Url(path="/")), ("#?/!google.com/?foo", Url(path="", fragment="?/!google.com/?foo")), ("/foo", Url(path="/foo")), ("/foo?bar=baz", Url(path="/foo", query="bar=baz")), ( "/foo?bar=baz#banana?apple/orange", Url(path="/foo", query="bar=baz", fragment="banana?apple/orange"), ), ( "/redirect?target=http://localhost:61020/", Url(path="redirect", query="target=http://localhost:61020/"), ), # Port ("http://google.com/", Url("http", host="google.com", path="/")), ("http://google.com:80/", Url("http", host="google.com", port=80, path="/")), ("http://google.com:80", Url("http", host="google.com", port=80)), # Auth ( "http://foo:bar@localhost/", Url("http", auth="foo:bar", host="localhost", path="/"), ), ("http://foo@localhost/", Url("http", auth="foo", host="localhost", path="/")), ( "http://foo:bar@localhost/", Url("http", auth="foo:bar", host="localhost", path="/"), ), ] non_round_tripping_parse_url_host_map = [ # Path/query/fragment ("?", Url(path="", query="")), ("#", Url(path="", fragment="")), # Path normalization ("/abc/../def", Url(path="/def")), # Empty Port ("http://google.com:", Url("http", host="google.com")), ("http://google.com:/", Url("http", host="google.com", path="/")), # Uppercase IRI ( "http://Königsgäßchen.de/straße", Url("http", host="xn--knigsgchen-b4a3dun.de", path="/stra%C3%9Fe"), ), # Percent-encode in userinfo ( "http://user@email.com:password@example.com/", Url("http", auth="user%40email.com:password", host="example.com", path="/"), ), ( 'http://user":quoted@example.com/', Url("http", 
auth="user%22:quoted", host="example.com", path="/"), ), # Unicode Surrogates ("http://google.com/\uD800", Url("http", host="google.com", path="%ED%A0%80")), ( "http://google.com?q=\uDC00", Url("http", host="google.com", path="", query="q=%ED%B0%80"), ), ( "http://google.com#\uDC00", Url("http", host="google.com", path="", fragment="%ED%B0%80"), ), ] @pytest.mark.parametrize( "url, expected_url", chain(parse_url_host_map, non_round_tripping_parse_url_host_map), ) def test_parse_url(self, url: str, expected_url: Url) -> None: returned_url = parse_url(url) assert returned_url == expected_url assert returned_url.hostname == returned_url.host == expected_url.host @pytest.mark.parametrize("url, expected_url", parse_url_host_map) def test_unparse_url(self, url: str, expected_url: Url) -> None: assert url == expected_url.url @pytest.mark.parametrize( ["url", "expected_url"], [ # RFC 3986 5.2.4 ("/abc/../def", Url(path="/def")), ("/..", Url(path="/")), ("/./abc/./def/", Url(path="/abc/def/")), ("/.", Url(path="/")), ("/./", Url(path="/")), ("/abc/./.././d/././e/.././f/./../../ghi", Url(path="/ghi")), ], ) def test_parse_and_normalize_url_paths(self, url: str, expected_url: Url) -> None: actual_url = parse_url(url) assert actual_url == expected_url assert actual_url.url == expected_url.url def test_parse_url_invalid_IPv6(self) -> None: with pytest.raises(LocationParseError): parse_url("[::1") def test_parse_url_negative_port(self) -> None: with pytest.raises(LocationParseError): parse_url("https://www.google.com:-80/") def test_parse_url_remove_leading_zeros(self) -> None: url = parse_url("https://example.com:0000000000080") assert url.port == 80 def test_parse_url_only_zeros(self) -> None: url = parse_url("https://example.com:0") assert url.port == 0 url = parse_url("https://example.com:000000000000") assert url.port == 0 def test_Url_str(self) -> None: U = Url("http", host="google.com") assert str(U) == U.url request_uri_map = [ ("http://google.com/mail", "/mail"), 
("http://google.com/mail/", "/mail/"), ("http://google.com/", "/"), ("http://google.com", "/"), ("", "/"), ("/", "/"), ("?", "/?"), ("#", "/"), ("/foo?bar=baz", "/foo?bar=baz"), ] @pytest.mark.parametrize("url, expected_request_uri", request_uri_map) def test_request_uri(self, url: str, expected_request_uri: str) -> None: returned_url = parse_url(url) assert returned_url.request_uri == expected_request_uri url_authority_map: list[tuple[str, str | None]] = [ ("http://user:pass@google.com/mail", "user:pass@google.com"), ("http://user:pass@google.com:80/mail", "user:pass@google.com:80"), ("http://user@google.com:80/mail", "user@google.com:80"), ("http://user:pass@192.168.1.1/path", "user:pass@192.168.1.1"), ("http://user:pass@192.168.1.1:80/path", "user:pass@192.168.1.1:80"), ("http://user@192.168.1.1:80/path", "user@192.168.1.1:80"), ("http://user:pass@[::1]/path", "user:pass@[::1]"), ("http://user:pass@[::1]:80/path", "user:pass@[::1]:80"), ("http://user@[::1]:80/path", "user@[::1]:80"), ("http://user:pass@localhost/path", "user:pass@localhost"), ("http://user:pass@localhost:80/path", "user:pass@localhost:80"), ("http://user@localhost:80/path", "user@localhost:80"), ] url_netloc_map = [ ("http://google.com/mail", "google.com"), ("http://google.com:80/mail", "google.com:80"), ("http://192.168.0.1/path", "192.168.0.1"), ("http://192.168.0.1:80/path", "192.168.0.1:80"), ("http://[::1]/path", "[::1]"), ("http://[::1]:80/path", "[::1]:80"), ("http://localhost", "localhost"), ("http://localhost:80", "localhost:80"), ("google.com/foobar", "google.com"), ("google.com:12345", "google.com:12345"), ("/", None), ] combined_netloc_authority_map = url_authority_map + url_netloc_map # We compose this list due to variances between parse_url # and urlparse when URIs don't provide a scheme. 
url_authority_with_schemes_map = [ u for u in combined_netloc_authority_map if u[0].startswith("http") ] @pytest.mark.parametrize("url, expected_authority", combined_netloc_authority_map) def test_authority(self, url: str, expected_authority: str | None) -> None: assert parse_url(url).authority == expected_authority @pytest.mark.parametrize("url, expected_authority", url_authority_with_schemes_map) def test_authority_matches_urllib_netloc( self, url: str, expected_authority: str | None ) -> None: """Validate this matches the behavior of urlparse().netloc""" assert urlparse(url).netloc == expected_authority @pytest.mark.parametrize("url, expected_netloc", url_netloc_map) def test_netloc(self, url: str, expected_netloc: str | None) -> None: assert parse_url(url).netloc == expected_netloc url_vulnerabilities = [ # urlparse doesn't follow RFC 3986 Section 3.2 ( "http://google.com#@evil.com/", Url("http", host="google.com", path="", fragment="@evil.com/"), ), # CVE-2016-5699 ( "http://127.0.0.1%0d%0aConnection%3a%20keep-alive", Url("http", host="127.0.0.1%0d%0aconnection%3a%20keep-alive"), ), # NodeJS unicode -> double dot ( "http://google.com/\uff2e\uff2e/abc", Url("http", host="google.com", path="/%EF%BC%AE%EF%BC%AE/abc"), ), # Scheme without :// ( "javascript:a='@google.com:12345/';alert(0)", Url(scheme="javascript", path="a='@google.com:12345/';alert(0)"), ), ("//google.com/a/b/c", Url(host="google.com", path="/a/b/c")), # International URLs ( "http://ヒ:キ@ヒ.abc.ニ/ヒ?キ#ワ", Url( "http", host="xn--pdk.abc.xn--idk", auth="%E3%83%92:%E3%82%AD", path="/%E3%83%92", query="%E3%82%AD", fragment="%E3%83%AF", ), ), # Injected headers (CVE-2016-5699, CVE-2019-9740, CVE-2019-9947) ( "10.251.0.83:7777?a=1 HTTP/1.1\r\nX-injected: header", Url( host="10.251.0.83", port=7777, path="", query="a=1%20HTTP/1.1%0D%0AX-injected:%20header", ), ), ( "http://127.0.0.1:6379?\r\nSET test failure12\r\n:8080/test/?test=a", Url( scheme="http", host="127.0.0.1", port=6379, path="", 
query="%0D%0ASET%20test%20failure12%0D%0A:8080/test/?test=a", ), ), # See https://bugs.xdavidhu.me/google/2020/03/08/the-unexpected-google-wide-domain-check-bypass/ ( "https://user:pass@xdavidhu.me\\test.corp.google.com:8080/path/to/something?param=value#hash", Url( scheme="https", auth="user:pass", host="xdavidhu.me", path="/%5Ctest.corp.google.com:8080/path/to/something", query="param=value", fragment="hash", ), ), # Tons of '@' causing backtracking pytest.param( "https://" + ("@" * 10000) + "[", False, id="Tons of '@' causing backtracking 1", ), pytest.param( "https://user:" + ("@" * 10000) + "example.com", Url( scheme="https", auth="user:" + ("%40" * 9999), host="example.com", ), id="Tons of '@' causing backtracking 2", ), ] @pytest.mark.parametrize("url, expected_url", url_vulnerabilities) def test_url_vulnerabilities( self, url: str, expected_url: typing.Literal[False] | Url ) -> None: if expected_url is False: with pytest.raises(LocationParseError): parse_url(url) else: assert parse_url(url) == expected_url def test_parse_url_bytes_type_error(self) -> None: with pytest.raises(TypeError): parse_url(b"https://www.google.com/") # type: ignore[arg-type] @pytest.mark.parametrize( "kwargs, expected", [ pytest.param( {"accept_encoding": True}, {"accept-encoding": "gzip,deflate,br,zstd"}, marks=[onlyBrotli(), onlyZstd()], # type: ignore[list-item] ), pytest.param( {"accept_encoding": True}, {"accept-encoding": "gzip,deflate,br"}, marks=[onlyBrotli(), notZstd()], # type: ignore[list-item] ), pytest.param( {"accept_encoding": True}, {"accept-encoding": "gzip,deflate,zstd"}, marks=[notBrotli(), onlyZstd()], # type: ignore[list-item] ), pytest.param( {"accept_encoding": True}, {"accept-encoding": "gzip,deflate"}, marks=[notBrotli(), notZstd()], # type: ignore[list-item] ), ({"accept_encoding": "foo,bar"}, {"accept-encoding": "foo,bar"}), ({"accept_encoding": ["foo", "bar"]}, {"accept-encoding": "foo,bar"}), pytest.param( {"accept_encoding": True, "user_agent": 
"banana"}, {"accept-encoding": "gzip,deflate,br,zstd", "user-agent": "banana"}, marks=[onlyBrotli(), onlyZstd()], # type: ignore[list-item] ), pytest.param( {"accept_encoding": True, "user_agent": "banana"}, {"accept-encoding": "gzip,deflate,br", "user-agent": "banana"}, marks=[onlyBrotli(), notZstd()], # type: ignore[list-item] ), pytest.param( {"accept_encoding": True, "user_agent": "banana"}, {"accept-encoding": "gzip,deflate,zstd", "user-agent": "banana"}, marks=[notBrotli(), onlyZstd()], # type: ignore[list-item] ), pytest.param( {"accept_encoding": True, "user_agent": "banana"}, {"accept-encoding": "gzip,deflate", "user-agent": "banana"}, marks=[notBrotli(), notZstd()], # type: ignore[list-item] ), ({"user_agent": "banana"}, {"user-agent": "banana"}), ({"keep_alive": True}, {"connection": "keep-alive"}), ({"basic_auth": "foo:bar"}, {"authorization": "Basic Zm9vOmJhcg=="}), ( {"proxy_basic_auth": "foo:bar"}, {"proxy-authorization": "Basic Zm9vOmJhcg=="}, ), ({"disable_cache": True}, {"cache-control": "no-cache"}), ], ) def test_make_headers( self, kwargs: dict[str, bool | str], expected: dict[str, str] ) -> None: assert make_headers(**kwargs) == expected # type: ignore[arg-type] def test_rewind_body(self) -> None: body = io.BytesIO(b"test data") assert body.read() == b"test data" # Assert the file object has been consumed assert body.read() == b"" # Rewind it back to just be b'data' rewind_body(body, 5) assert body.read() == b"data" def test_rewind_body_failed_tell(self) -> None: body = io.BytesIO(b"test data") body.read() # Consume body # Simulate failed tell() body_pos = _FAILEDTELL with pytest.raises(UnrewindableBodyError): rewind_body(body, body_pos) def test_rewind_body_bad_position(self) -> None: body = io.BytesIO(b"test data") body.read() # Consume body # Pass non-integer position with pytest.raises(ValueError): rewind_body(body, body_pos=None) # type: ignore[arg-type] with pytest.raises(ValueError): rewind_body(body, body_pos=object()) # type: 
ignore[arg-type] def test_rewind_body_failed_seek(self) -> None: class BadSeek(io.StringIO): def seek(self, offset: int, whence: int = 0) -> typing.NoReturn: raise OSError with pytest.raises(UnrewindableBodyError): rewind_body(BadSeek(), body_pos=2) def test_add_stderr_logger(self) -> None: handler = add_stderr_logger(level=logging.INFO) # Don't actually print debug logger = logging.getLogger("urllib3") assert handler in logger.handlers logger.debug("Testing add_stderr_logger") logger.removeHandler(handler) def test_disable_warnings(self) -> None: with warnings.catch_warnings(record=True) as w: clear_warnings() warnings.simplefilter("default", InsecureRequestWarning) warnings.warn("This is a test.", InsecureRequestWarning) assert len(w) == 1 disable_warnings() warnings.warn("This is a test.", InsecureRequestWarning) assert len(w) == 1 def _make_time_pass( self, seconds: int, timeout: Timeout, time_mock: Mock ) -> Timeout: """Make some time pass for the timeout object""" time_mock.return_value = TIMEOUT_EPOCH timeout.start_connect() time_mock.return_value = TIMEOUT_EPOCH + seconds return timeout @pytest.mark.parametrize( "kwargs, message", [ ({"total": -1}, "less than"), ({"connect": 2, "total": -1}, "less than"), ({"read": -1}, "less than"), ({"connect": False}, "cannot be a boolean"), ({"read": True}, "cannot be a boolean"), ({"connect": 0}, "less than or equal"), ({"read": "foo"}, "int, float or None"), ({"read": "1.0"}, "int, float or None"), ], ) def test_invalid_timeouts( self, kwargs: dict[str, int | bool], message: str ) -> None: with pytest.raises(ValueError, match=message): Timeout(**kwargs) @patch("time.monotonic") def test_timeout(self, time_monotonic: MagicMock) -> None: timeout = Timeout(total=3) # make 'no time' elapse timeout = self._make_time_pass( seconds=0, timeout=timeout, time_mock=time_monotonic ) assert timeout.read_timeout == 3 assert timeout.connect_timeout == 3 timeout = Timeout(total=3, connect=2) assert timeout.connect_timeout == 2 
timeout = Timeout() assert timeout.connect_timeout == _DEFAULT_TIMEOUT # Connect takes 5 seconds, leaving 5 seconds for read timeout = Timeout(total=10, read=7) timeout = self._make_time_pass( seconds=5, timeout=timeout, time_mock=time_monotonic ) assert timeout.read_timeout == 5 # Connect takes 2 seconds, read timeout still 7 seconds timeout = Timeout(total=10, read=7) timeout = self._make_time_pass( seconds=2, timeout=timeout, time_mock=time_monotonic ) assert timeout.read_timeout == 7 timeout = Timeout(total=10, read=7) assert timeout.read_timeout == 7 timeout = Timeout(total=None, read=None, connect=None) assert timeout.connect_timeout is None assert timeout.read_timeout is None assert timeout.total is None timeout = Timeout(5) assert timeout.total == 5 def test_timeout_default_resolve(self) -> None: """The timeout default is resolved when read_timeout is accessed.""" timeout = Timeout() with patch("urllib3.util.timeout.getdefaulttimeout", return_value=2): assert timeout.read_timeout == 2 with patch("urllib3.util.timeout.getdefaulttimeout", return_value=3): assert timeout.read_timeout == 3 def test_timeout_str(self) -> None: timeout = Timeout(connect=1, read=2, total=3) assert str(timeout) == "Timeout(connect=1, read=2, total=3)" timeout = Timeout(connect=1, read=None, total=3) assert str(timeout) == "Timeout(connect=1, read=None, total=3)" @patch("time.monotonic") def test_timeout_elapsed(self, time_monotonic: MagicMock) -> None: time_monotonic.return_value = TIMEOUT_EPOCH timeout = Timeout(total=3) with pytest.raises(TimeoutStateError): timeout.get_connect_duration() timeout.start_connect() with pytest.raises(TimeoutStateError): timeout.start_connect() time_monotonic.return_value = TIMEOUT_EPOCH + 2 assert timeout.get_connect_duration() == 2 time_monotonic.return_value = TIMEOUT_EPOCH + 37 assert timeout.get_connect_duration() == 37 def test_is_fp_closed_object_supports_closed(self) -> None: class ClosedFile: @property def closed(self) -> 
typing.Literal[True]: return True assert is_fp_closed(ClosedFile()) def test_is_fp_closed_object_has_none_fp(self) -> None: class NoneFpFile: @property def fp(self) -> None: return None assert is_fp_closed(NoneFpFile()) def test_is_fp_closed_object_has_fp(self) -> None: class FpFile: @property def fp(self) -> typing.Literal[True]: return True assert not is_fp_closed(FpFile()) def test_is_fp_closed_object_has_neither_fp_nor_closed(self) -> None: class NotReallyAFile: pass with pytest.raises(ValueError): is_fp_closed(NotReallyAFile()) def test_has_ipv6_disabled_on_compile(self) -> None: with patch("socket.has_ipv6", False): assert not _has_ipv6("::1") def test_has_ipv6_enabled_but_fails(self) -> None: with patch("socket.has_ipv6", True): with patch("socket.socket") as mock: instance = mock.return_value instance.bind = Mock(side_effect=Exception("No IPv6 here!")) assert not _has_ipv6("::1") def test_has_ipv6_enabled_and_working(self) -> None: with patch("socket.has_ipv6", True): with patch("socket.socket") as mock: instance = mock.return_value instance.bind.return_value = True assert _has_ipv6("::1") def test_ip_family_ipv6_enabled(self) -> None: with patch("urllib3.util.connection.HAS_IPV6", True): assert allowed_gai_family() == socket.AF_UNSPEC def test_ip_family_ipv6_disabled(self) -> None: with patch("urllib3.util.connection.HAS_IPV6", False): assert allowed_gai_family() == socket.AF_INET @pytest.mark.parametrize("headers", [b"foo", None, object]) def test_assert_header_parsing_throws_typeerror_with_non_headers( self, headers: bytes | object | None ) -> None: with pytest.raises(TypeError): assert_header_parsing(headers) # type: ignore[arg-type] def test_connection_requires_http_tunnel_no_proxy(self) -> None: assert not connection_requires_http_tunnel( proxy_url=None, proxy_config=None, destination_scheme=None ) def test_connection_requires_http_tunnel_http_proxy(self) -> None: proxy = parse_url("http://proxy:8080") proxy_config = ProxyConfig( ssl_context=None, 
use_forwarding_for_https=False, assert_hostname=None, assert_fingerprint=None, ) destination_scheme = "http" assert not connection_requires_http_tunnel( proxy, proxy_config, destination_scheme ) destination_scheme = "https" assert connection_requires_http_tunnel(proxy, proxy_config, destination_scheme) def test_connection_requires_http_tunnel_https_proxy(self) -> None: proxy = parse_url("https://proxy:8443") proxy_config = ProxyConfig( ssl_context=None, use_forwarding_for_https=False, assert_hostname=None, assert_fingerprint=None, ) destination_scheme = "http" assert not connection_requires_http_tunnel( proxy, proxy_config, destination_scheme ) def test_assert_header_parsing_no_error_on_multipart(self) -> None: from http import client header_msg = io.BytesIO() header_msg.write( b'Content-Type: multipart/encrypted;protocol="application/' b'HTTP-SPNEGO-session-encrypted";boundary="Encrypted Boundary"' b"\nServer: Microsoft-HTTPAPI/2.0\nDate: Fri, 16 Aug 2019 19:28:01 GMT" b"\nContent-Length: 1895\n\n\n" ) header_msg.seek(0) assert_header_parsing(client.parse_headers(header_msg)) @pytest.mark.parametrize("host", [".localhost", "...", "t" * 64]) def test_create_connection_with_invalid_idna_labels(self, host: str) -> None: with pytest.raises( LocationParseError, match=f"Failed to parse: '{host}', label empty or too long", ): create_connection((host, 80)) @pytest.mark.parametrize( "host", [ "a.example.com", "localhost.", "[dead::beef]", "[dead::beef%en5]", "[dead::beef%en5.]", ], ) @patch("socket.getaddrinfo") @patch("socket.socket") def test_create_connection_with_valid_idna_labels( self, socket: MagicMock, getaddrinfo: MagicMock, host: str ) -> None: getaddrinfo.return_value = [(None, None, None, None, None)] socket.return_value = Mock() create_connection((host, 80)) @patch("socket.getaddrinfo") def test_create_connection_error(self, getaddrinfo: MagicMock) -> None: getaddrinfo.return_value = [] with pytest.raises(OSError, match="getaddrinfo returns an empty list"): 
create_connection(("example.com", 80)) @patch("socket.getaddrinfo") def test_dnsresolver_forced_error(self, getaddrinfo: MagicMock) -> None: getaddrinfo.side_effect = socket.gaierror() with pytest.raises(socket.gaierror): # dns is valid but we force the error just for the sake of the test create_connection(("example.com", 80)) def test_dnsresolver_expected_error(self) -> None: with pytest.raises(socket.gaierror): # windows: [Errno 11001] getaddrinfo failed in windows # linux: [Errno -2] Name or service not known # macos: [Errno 8] nodename nor servname provided, or not known create_connection(("badhost.invalid", 80)) @patch("socket.getaddrinfo") @patch("socket.socket") def test_create_connection_with_scoped_ipv6( self, socket: MagicMock, getaddrinfo: MagicMock ) -> None: # Check that providing create_connection with a scoped IPv6 address # properly propagates the scope to getaddrinfo, and that the returned # scoped ID makes it to the socket creation call. fake_scoped_sa6 = ("a::b", 80, 0, 42) getaddrinfo.return_value = [ ( socket.AF_INET6, socket.SOCK_STREAM, socket.IPPROTO_TCP, "", fake_scoped_sa6, ) ] socket.return_value = fake_sock = MagicMock() create_connection(("a::b%iface", 80)) assert getaddrinfo.call_args[0][0] == "a::b%iface" fake_sock.connect.assert_called_once_with(fake_scoped_sa6) @pytest.mark.parametrize( "input,params,expected", ( ("test", {}, "test"), # str input (b"test", {}, "test"), # bytes input (b"test", {"encoding": "utf-8"}, "test"), # bytes input with utf-8 (b"test", {"encoding": "ascii"}, "test"), # bytes input with ascii ), ) def test_to_str( self, input: bytes | str, params: dict[str, str], expected: str ) -> None: assert to_str(input, **params) == expected def test_to_str_error(self) -> None: with pytest.raises(TypeError, match="not expecting type int"): to_str(1) # type: ignore[arg-type] @pytest.mark.parametrize( "input,params,expected", ( (b"test", {}, b"test"), # str input ("test", {}, b"test"), # bytes input ("é", {}, b"\xc3\xa9"), # 
bytes input ("test", {"encoding": "utf-8"}, b"test"), # bytes input with utf-8 ("test", {"encoding": "ascii"}, b"test"), # bytes input with ascii ), ) def test_to_bytes( self, input: bytes | str, params: dict[str, str], expected: bytes ) -> None: assert to_bytes(input, **params) == expected def test_to_bytes_error(self) -> None: with pytest.raises(TypeError, match="not expecting type int"): to_bytes(1) # type: ignore[arg-type] class TestUtilSSL: """Test utils that use an SSL backend.""" @pytest.mark.parametrize( "candidate, requirements", [ (None, ssl.CERT_REQUIRED), (ssl.CERT_NONE, ssl.CERT_NONE), (ssl.CERT_REQUIRED, ssl.CERT_REQUIRED), ("REQUIRED", ssl.CERT_REQUIRED), ("CERT_REQUIRED", ssl.CERT_REQUIRED), ], ) def test_resolve_cert_reqs( self, candidate: int | str | None, requirements: int ) -> None: assert resolve_cert_reqs(candidate) == requirements @pytest.mark.parametrize( "candidate, version", [ (ssl.PROTOCOL_TLSv1, ssl.PROTOCOL_TLSv1), ("PROTOCOL_TLSv1", ssl.PROTOCOL_TLSv1), ("TLSv1", ssl.PROTOCOL_TLSv1), (ssl.PROTOCOL_SSLv23, ssl.PROTOCOL_SSLv23), ], ) def test_resolve_ssl_version(self, candidate: int | str, version: int) -> None: assert resolve_ssl_version(candidate) == version def test_ssl_wrap_socket_loads_the_cert_chain(self) -> None: socket = Mock() mock_context = Mock() ssl_wrap_socket( ssl_context=mock_context, sock=socket, certfile="/path/to/certfile" ) mock_context.load_cert_chain.assert_called_once_with("/path/to/certfile", None) @patch("urllib3.util.ssl_.create_urllib3_context") def test_ssl_wrap_socket_creates_new_context( self, create_urllib3_context: mock.MagicMock ) -> None: socket = Mock() ssl_wrap_socket(socket, cert_reqs=ssl.CERT_REQUIRED) create_urllib3_context.assert_called_once_with(None, 2, ciphers=None) def test_ssl_wrap_socket_loads_verify_locations(self) -> None: socket = Mock() mock_context = Mock() ssl_wrap_socket(ssl_context=mock_context, ca_certs="/path/to/pem", sock=socket) 
mock_context.load_verify_locations.assert_called_once_with( "/path/to/pem", None, None ) def test_ssl_wrap_socket_loads_certificate_directories(self) -> None: socket = Mock() mock_context = Mock() ssl_wrap_socket( ssl_context=mock_context, ca_cert_dir="/path/to/pems", sock=socket ) mock_context.load_verify_locations.assert_called_once_with( None, "/path/to/pems", None ) def test_ssl_wrap_socket_loads_certificate_data(self) -> None: socket = Mock() mock_context = Mock() ssl_wrap_socket( ssl_context=mock_context, ca_cert_data="TOTALLY PEM DATA", sock=socket ) mock_context.load_verify_locations.assert_called_once_with( None, None, "TOTALLY PEM DATA" ) def _wrap_socket_and_mock_warn( self, sock: socket.socket, server_hostname: str | None ) -> tuple[Mock, MagicMock]: mock_context = Mock() with patch("warnings.warn") as warn: ssl_wrap_socket( ssl_context=mock_context, sock=sock, server_hostname=server_hostname, ) return mock_context, warn def test_ssl_wrap_socket_sni_ip_address_no_warn(self) -> None: """Test that a warning is not made if server_hostname is an IP address.""" sock = Mock() context, warn = self._wrap_socket_and_mock_warn(sock, "8.8.8.8") context.wrap_socket.assert_called_once_with(sock, server_hostname="8.8.8.8") warn.assert_not_called() def test_ssl_wrap_socket_sni_none_no_warn(self) -> None: """Test that a warning is not made if server_hostname is not given.""" sock = Mock() context, warn = self._wrap_socket_and_mock_warn(sock, None) context.wrap_socket.assert_called_once_with(sock, server_hostname=None) warn.assert_not_called() @pytest.mark.parametrize( "openssl_version, openssl_version_number, implementation_name, version_info, pypy_version_info, reliable", [ # OpenSSL and Python OK -> reliable ("OpenSSL 1.1.1", 0x101010CF, "cpython", (3, 9, 3), None, True), # Python OK -> reliable ("OpenSSL 1.1.1", 0x10101000, "cpython", (3, 9, 3), None, True), # PyPy: depends on the version ("OpenSSL 1.1.1", 0x10101000, "pypy", (3, 9, 9), (7, 3, 7), False), ("OpenSSL 
1.1.1", 0x101010CF, "pypy", (3, 8, 12), (7, 3, 8), True), # OpenSSL OK -> reliable ("OpenSSL 1.1.1", 0x101010CF, "cpython", (3, 9, 2), None, True), # not OpenSSSL -> unreliable ("LibreSSL 2.8.3", 0x101010CF, "cpython", (3, 10, 0), None, False), # old OpenSSL and old Python, unreliable ("OpenSSL 1.1.0", 0x10101000, "cpython", (3, 9, 2), None, False), ], ) def test_is_has_never_check_common_name_reliable( self, openssl_version: str, openssl_version_number: int, implementation_name: str, version_info: _TYPE_VERSION_INFO, pypy_version_info: _TYPE_VERSION_INFO | None, reliable: bool, ) -> None: assert ( _is_has_never_check_common_name_reliable( openssl_version, openssl_version_number, implementation_name, version_info, pypy_version_info, ) == reliable ) idna_blocker = ImportBlocker("idna") module_stash = ModuleStash("urllib3") class TestUtilWithoutIdna: @classmethod def setup_class(cls) -> None: sys.modules.pop("idna", None) module_stash.stash() sys.meta_path.insert(0, idna_blocker) @classmethod def teardown_class(cls) -> None: sys.meta_path.remove(idna_blocker) module_stash.pop() def test_parse_url_without_idna(self) -> None: url = "http://\uD7FF.com" with pytest.raises(LocationParseError, match=f"Failed to parse: {url}"): parse_url(url) test_connection.py 0000644 00000030727 15025234504 0010325 0 ustar 00 from __future__ import annotations import datetime import socket import typing from http.client import ResponseNotReady from unittest import mock import pytest from urllib3.connection import ( # type: ignore[attr-defined] RECENT_DATE, CertificateError, HTTPConnection, HTTPSConnection, _match_hostname, _url_from_connection, _wrap_proxy_error, ) from urllib3.exceptions import HTTPError, ProxyError, SSLError from urllib3.util import ssl_ from urllib3.util.request import SKIP_HEADER from urllib3.util.ssl_match_hostname import ( CertificateError as ImplementationCertificateError, ) from urllib3.util.ssl_match_hostname import _dnsname_match, match_hostname if 
typing.TYPE_CHECKING: from urllib3.util.ssl_ import _TYPE_PEER_CERT_RET_DICT class TestConnection: """ Tests in this suite should not make any network requests or connections. """ def test_match_hostname_no_cert(self) -> None: cert = None asserted_hostname = "foo" with pytest.raises(ValueError): _match_hostname(cert, asserted_hostname) def test_match_hostname_empty_cert(self) -> None: cert: _TYPE_PEER_CERT_RET_DICT = {} asserted_hostname = "foo" with pytest.raises(ValueError): _match_hostname(cert, asserted_hostname) def test_match_hostname_match(self) -> None: cert: _TYPE_PEER_CERT_RET_DICT = {"subjectAltName": (("DNS", "foo"),)} asserted_hostname = "foo" _match_hostname(cert, asserted_hostname) def test_match_hostname_mismatch(self) -> None: cert: _TYPE_PEER_CERT_RET_DICT = {"subjectAltName": (("DNS", "foo"),)} asserted_hostname = "bar" try: with mock.patch("urllib3.connection.log.warning") as mock_log: _match_hostname(cert, asserted_hostname) except CertificateError as e: assert "hostname 'bar' doesn't match 'foo'" in str(e) mock_log.assert_called_once_with( "Certificate did not match expected hostname: %s. Certificate: %s", "bar", {"subjectAltName": (("DNS", "foo"),)}, ) assert e._peer_cert == cert # type: ignore[attr-defined] def test_match_hostname_no_dns(self) -> None: cert: _TYPE_PEER_CERT_RET_DICT = {"subjectAltName": (("DNS", ""),)} asserted_hostname = "bar" try: with mock.patch("urllib3.connection.log.warning") as mock_log: _match_hostname(cert, asserted_hostname) except CertificateError as e: assert "hostname 'bar' doesn't match ''" in str(e) mock_log.assert_called_once_with( "Certificate did not match expected hostname: %s. 
Certificate: %s", "bar", {"subjectAltName": (("DNS", ""),)}, ) assert e._peer_cert == cert # type: ignore[attr-defined] def test_match_hostname_startwith_wildcard(self) -> None: cert: _TYPE_PEER_CERT_RET_DICT = {"subjectAltName": (("DNS", "*"),)} asserted_hostname = "foo" _match_hostname(cert, asserted_hostname) def test_match_hostname_dnsname(self) -> None: cert: _TYPE_PEER_CERT_RET_DICT = { "subjectAltName": (("DNS", "xn--p1b6ci4b4b3a*.xn--11b5bs8d"),) } asserted_hostname = "xn--p1b6ci4b4b3a*.xn--11b5bs8d" _match_hostname(cert, asserted_hostname) def test_match_hostname_include_wildcard(self) -> None: cert: _TYPE_PEER_CERT_RET_DICT = {"subjectAltName": (("DNS", "foo*"),)} asserted_hostname = "foobar" _match_hostname(cert, asserted_hostname) def test_match_hostname_more_than_one_dnsname_error(self) -> None: cert: _TYPE_PEER_CERT_RET_DICT = { "subjectAltName": (("DNS", "foo*"), ("DNS", "fo*")) } asserted_hostname = "bar" with pytest.raises(CertificateError, match="doesn't match either of"): _match_hostname(cert, asserted_hostname) def test_dnsname_match_include_more_than_one_wildcard_error(self) -> None: with pytest.raises(CertificateError, match="too many wildcards in certificate"): _dnsname_match("foo**", "foobar") def test_match_hostname_ignore_common_name(self) -> None: cert: _TYPE_PEER_CERT_RET_DICT = {"subject": ((("commonName", "foo"),),)} asserted_hostname = "foo" with pytest.raises( ImplementationCertificateError, match="no appropriate subjectAltName fields were found", ): match_hostname(cert, asserted_hostname) def test_match_hostname_check_common_name(self) -> None: cert: _TYPE_PEER_CERT_RET_DICT = {"subject": ((("commonName", "foo"),),)} asserted_hostname = "foo" match_hostname(cert, asserted_hostname, True) def test_match_hostname_ip_address(self) -> None: cert: _TYPE_PEER_CERT_RET_DICT = { "subjectAltName": (("IP Address", "1.1.1.1"),) } asserted_hostname = "1.1.1.2" try: with mock.patch("urllib3.connection.log.warning") as mock_log: 
_match_hostname(cert, asserted_hostname) except CertificateError as e: assert "hostname '1.1.1.2' doesn't match '1.1.1.1'" in str(e) mock_log.assert_called_once_with( "Certificate did not match expected hostname: %s. Certificate: %s", "1.1.1.2", {"subjectAltName": (("IP Address", "1.1.1.1"),)}, ) assert e._peer_cert == cert # type: ignore[attr-defined] @pytest.mark.parametrize( ["asserted_hostname", "san_ip"], [ ("1:2::3:4", "1:2:0:0:0:0:3:4"), ("1:2:0:0::3:4", "1:2:0:0:0:0:3:4"), ("::0.1.0.2", "0:0:0:0:0:0:1:2"), ("::1%42", "0:0:0:0:0:0:0:1"), ("::2%iface", "0:0:0:0:0:0:0:2"), ], ) def test_match_hostname_ip_address_ipv6( self, asserted_hostname: str, san_ip: str ) -> None: """Check that hostname matches follow RFC 9110 rules for IPv6.""" cert: _TYPE_PEER_CERT_RET_DICT = {"subjectAltName": (("IP Address", san_ip),)} match_hostname(cert, asserted_hostname) def test_match_hostname_ip_address_ipv6_doesnt_match(self) -> None: cert: _TYPE_PEER_CERT_RET_DICT = { "subjectAltName": (("IP Address", "1:2::2:1"),) } asserted_hostname = "1:2::2:2" try: with mock.patch("urllib3.connection.log.warning") as mock_log: _match_hostname(cert, asserted_hostname) except CertificateError as e: assert "hostname '1:2::2:2' doesn't match '1:2::2:1'" in str(e) mock_log.assert_called_once_with( "Certificate did not match expected hostname: %s. 
Certificate: %s", "1:2::2:2", {"subjectAltName": (("IP Address", "1:2::2:1"),)}, ) assert e._peer_cert == cert # type: ignore[attr-defined] def test_match_hostname_dns_with_brackets_doesnt_match(self) -> None: cert: _TYPE_PEER_CERT_RET_DICT = { "subjectAltName": ( ("DNS", "localhost"), ("IP Address", "localhost"), ) } asserted_hostname = "[localhost]" with pytest.raises(CertificateError) as e: _match_hostname(cert, asserted_hostname) assert ( "hostname '[localhost]' doesn't match either of 'localhost', 'localhost'" in str(e.value) ) def test_match_hostname_ip_address_ipv6_brackets(self) -> None: cert: _TYPE_PEER_CERT_RET_DICT = { "subjectAltName": (("IP Address", "1:2::2:1"),) } asserted_hostname = "[1:2::2:1]" # Assert no error is raised _match_hostname(cert, asserted_hostname) def test_recent_date(self) -> None: # This test is to make sure that the RECENT_DATE value # doesn't get too far behind what the current date is. # When this test fails update urllib3.connection.RECENT_DATE # according to the rules defined in that file. two_years = datetime.timedelta(days=365 * 2) assert RECENT_DATE > (datetime.datetime.today() - two_years).date() def test_HTTPSConnection_default_socket_options(self) -> None: conn = HTTPSConnection("not.a.real.host", port=443) assert conn.socket_options == [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)] @pytest.mark.parametrize( "proxy_scheme, err_part", [ ("http", "Unable to connect to proxy"), ( "https", "Unable to connect to proxy. 
Your proxy appears to only use HTTP and not HTTPS", ), ], ) def test_wrap_proxy_error(self, proxy_scheme: str, err_part: str) -> None: new_err = _wrap_proxy_error(HTTPError("unknown protocol"), proxy_scheme) assert isinstance(new_err, ProxyError) is True assert err_part in new_err.args[0] def test_url_from_pool(self) -> None: conn = HTTPConnection("google.com", port=80) path = "path?query=foo" assert f"http://google.com:80/{path}" == _url_from_connection(conn, path) def test_getresponse_requires_reponseoptions(self) -> None: conn = HTTPConnection("google.com", port=80) # Should error if a request has not been sent with pytest.raises(ResponseNotReady): conn.getresponse() def test_assert_fingerprint_closes_socket(self) -> None: context = mock.create_autospec(ssl_.SSLContext) context.wrap_socket.return_value.getpeercert.return_value = b"fake cert" conn = HTTPSConnection( "google.com", port=443, assert_fingerprint="AA:AA:AA:AA:AA:AA:AA:AA:AA:AA:AA:AA:AA:AA:AA:AA:AA:AA:AA:AA", ssl_context=context, ) with mock.patch.object(conn, "_new_conn"): with pytest.raises(SSLError): conn.connect() context.wrap_socket.return_value.close.assert_called_once_with() def test_assert_hostname_closes_socket(self) -> None: context = mock.create_autospec(ssl_.SSLContext) context.wrap_socket.return_value.getpeercert.return_value = { "subjectAltName": (("DNS", "google.com"),) } conn = HTTPSConnection( "google.com", port=443, assert_hostname="example.com", ssl_context=context ) with mock.patch.object(conn, "_new_conn"): with pytest.raises(ImplementationCertificateError): conn.connect() context.wrap_socket.return_value.close.assert_called_once_with() @pytest.mark.parametrize( "accept_encoding", [ "Accept-Encoding", "accept-encoding", b"Accept-Encoding", b"accept-encoding", None, ], ) @pytest.mark.parametrize("host", ["Host", "host", b"Host", b"host", None]) @pytest.mark.parametrize( "user_agent", ["User-Agent", "user-agent", b"User-Agent", b"user-agent", None] ) 
@pytest.mark.parametrize("chunked", [True, False]) def test_skip_header( self, accept_encoding: str | None, host: str | None, user_agent: str | None, chunked: bool, ) -> None: headers = {} if accept_encoding is not None: headers[accept_encoding] = SKIP_HEADER if host is not None: headers[host] = SKIP_HEADER if user_agent is not None: headers[user_agent] = SKIP_HEADER # When dropping support for Python 3.9, this can be rewritten to parenthesized # context managers with mock.patch("urllib3.util.connection.create_connection"): with mock.patch( "urllib3.connection._HTTPConnection.putheader" ) as http_client_putheader: conn = HTTPConnection("") conn.request("GET", "/headers", headers=headers, chunked=chunked) request_headers = {} for call in http_client_putheader.call_args_list: header, value = call.args request_headers[header] = value if accept_encoding is None: assert "Accept-Encoding" in request_headers else: assert accept_encoding not in request_headers if host is None: assert "Host" in request_headers else: assert host not in request_headers if user_agent is None: assert "User-Agent" in request_headers else: assert user_agent not in request_headers test_connectionpool.py 0000644 00000054372 15025234504 0011221 0 ustar 00 from __future__ import annotations import http.client as httplib import ssl import typing from http.client import HTTPException from queue import Empty from socket import error as SocketError from ssl import SSLError as BaseSSLError from test import SHORT_TIMEOUT from unittest.mock import Mock, patch import pytest from dummyserver.socketserver import DEFAULT_CA from urllib3 import Retry from urllib3.connection import HTTPConnection from urllib3.connectionpool import ( HTTPConnectionPool, HTTPSConnectionPool, _url_from_pool, connection_from_url, ) from urllib3.exceptions import ( ClosedPoolError, EmptyPoolError, FullPoolError, HostChangedError, LocationValueError, MaxRetryError, ProtocolError, ReadTimeoutError, SSLError, TimeoutError, ) from 
urllib3.response import HTTPResponse from urllib3.util.ssl_match_hostname import CertificateError from urllib3.util.timeout import _DEFAULT_TIMEOUT, Timeout from .test_response import MockChunkedEncodingResponse, MockSock class HTTPUnixConnection(HTTPConnection): def __init__(self, host: str, timeout: int = 60, **kwargs: typing.Any) -> None: super().__init__("localhost") self.unix_socket = host self.timeout = timeout self.sock = None class HTTPUnixConnectionPool(HTTPConnectionPool): scheme = "http+unix" ConnectionCls = HTTPUnixConnection class TestConnectionPool: """ Tests in this suite should exercise the ConnectionPool functionality without actually making any network requests or connections. """ @pytest.mark.parametrize( "a, b", [ ("http://google.com/", "/"), ("http://google.com/", "http://google.com/"), ("http://google.com/", "http://google.com"), ("http://google.com/", "http://google.com/abra/cadabra"), ("http://google.com:42/", "http://google.com:42/abracadabra"), # Test comparison using default ports ("http://google.com:80/", "http://google.com/abracadabra"), ("http://google.com/", "http://google.com:80/abracadabra"), ("https://google.com:443/", "https://google.com/abracadabra"), ("https://google.com/", "https://google.com:443/abracadabra"), ( "http://[2607:f8b0:4005:805::200e%25eth0]/", "http://[2607:f8b0:4005:805::200e%eth0]/", ), ( "https://[2607:f8b0:4005:805::200e%25eth0]:443/", "https://[2607:f8b0:4005:805::200e%eth0]:443/", ), ("http://[::1]/", "http://[::1]"), ( "http://[2001:558:fc00:200:f816:3eff:fef9:b954%lo]/", "http://[2001:558:fc00:200:f816:3eff:fef9:b954%25lo]", ), ], ) def test_same_host(self, a: str, b: str) -> None: with connection_from_url(a) as c: assert c.is_same_host(b) @pytest.mark.parametrize( "a, b", [ ("https://google.com/", "http://google.com/"), ("http://google.com/", "https://google.com/"), ("http://yahoo.com/", "http://google.com/"), ("http://google.com:42", "https://google.com/abracadabra"), ("http://google.com", 
"https://google.net/"), # Test comparison with default ports ("http://google.com:42", "http://google.com"), ("https://google.com:42", "https://google.com"), ("http://google.com:443", "http://google.com"), ("https://google.com:80", "https://google.com"), ("http://google.com:443", "https://google.com"), ("https://google.com:80", "http://google.com"), ("https://google.com:443", "http://google.com"), ("http://google.com:80", "https://google.com"), # Zone identifiers are unique connection end points and should # never be equivalent. ("http://[dead::beef]", "https://[dead::beef%en5]/"), ], ) def test_not_same_host(self, a: str, b: str) -> None: with connection_from_url(a) as c: assert not c.is_same_host(b) with connection_from_url(b) as c: assert not c.is_same_host(a) @pytest.mark.parametrize( "a, b", [ ("google.com", "/"), ("google.com", "http://google.com/"), ("google.com", "http://google.com"), ("google.com", "http://google.com/abra/cadabra"), # Test comparison using default ports ("google.com", "http://google.com:80/abracadabra"), ], ) def test_same_host_no_port_http(self, a: str, b: str) -> None: # This test was introduced in #801 to deal with the fact that urllib3 # never initializes ConnectionPool objects with port=None. with HTTPConnectionPool(a) as c: assert c.is_same_host(b) @pytest.mark.parametrize( "a, b", [ ("google.com", "/"), ("google.com", "https://google.com/"), ("google.com", "https://google.com"), ("google.com", "https://google.com/abra/cadabra"), # Test comparison using default ports ("google.com", "https://google.com:443/abracadabra"), ], ) def test_same_host_no_port_https(self, a: str, b: str) -> None: # This test was introduced in #801 to deal with the fact that urllib3 # never initializes ConnectionPool objects with port=None. 
with HTTPSConnectionPool(a) as c: assert c.is_same_host(b) @pytest.mark.parametrize( "a, b", [ ("google.com", "https://google.com/"), ("yahoo.com", "http://google.com/"), ("google.com", "https://google.net/"), ("google.com", "http://google.com./"), ], ) def test_not_same_host_no_port_http(self, a: str, b: str) -> None: with HTTPConnectionPool(a) as c: assert not c.is_same_host(b) with HTTPConnectionPool(b) as c: assert not c.is_same_host(a) @pytest.mark.parametrize( "a, b", [ ("google.com", "http://google.com/"), ("yahoo.com", "https://google.com/"), ("google.com", "https://google.net/"), ("google.com", "https://google.com./"), ], ) def test_not_same_host_no_port_https(self, a: str, b: str) -> None: with HTTPSConnectionPool(a) as c: assert not c.is_same_host(b) with HTTPSConnectionPool(b) as c: assert not c.is_same_host(a) @pytest.mark.parametrize( "a, b", [ ("%2Fvar%2Frun%2Fdocker.sock", "http+unix://%2Fvar%2Frun%2Fdocker.sock"), ("%2Fvar%2Frun%2Fdocker.sock", "http+unix://%2Fvar%2Frun%2Fdocker.sock/"), ( "%2Fvar%2Frun%2Fdocker.sock", "http+unix://%2Fvar%2Frun%2Fdocker.sock/abracadabra", ), ("%2Ftmp%2FTEST.sock", "http+unix://%2Ftmp%2FTEST.sock"), ("%2Ftmp%2FTEST.sock", "http+unix://%2Ftmp%2FTEST.sock/"), ("%2Ftmp%2FTEST.sock", "http+unix://%2Ftmp%2FTEST.sock/abracadabra"), ], ) def test_same_host_custom_protocol(self, a: str, b: str) -> None: with HTTPUnixConnectionPool(a) as c: assert c.is_same_host(b) @pytest.mark.parametrize( "a, b", [ ("%2Ftmp%2Ftest.sock", "http+unix://%2Ftmp%2FTEST.sock"), ("%2Ftmp%2Ftest.sock", "http+unix://%2Ftmp%2FTEST.sock/"), ("%2Ftmp%2Ftest.sock", "http+unix://%2Ftmp%2FTEST.sock/abracadabra"), ("%2Fvar%2Frun%2Fdocker.sock", "http+unix://%2Ftmp%2FTEST.sock"), ], ) def test_not_same_host_custom_protocol(self, a: str, b: str) -> None: with HTTPUnixConnectionPool(a) as c: assert not c.is_same_host(b) def test_max_connections(self) -> None: with HTTPConnectionPool(host="localhost", maxsize=1, block=True) as pool: 
pool._get_conn(timeout=SHORT_TIMEOUT) with pytest.raises(EmptyPoolError): pool._get_conn(timeout=SHORT_TIMEOUT) with pytest.raises(EmptyPoolError): pool.request("GET", "/", pool_timeout=SHORT_TIMEOUT) assert pool.num_connections == 1 def test_put_conn_when_pool_is_full_nonblocking( self, caplog: pytest.LogCaptureFixture ) -> None: """ If maxsize = n and we _put_conn n + 1 conns, the n + 1th conn will get closed and will not get added to the pool. """ with HTTPConnectionPool(host="localhost", maxsize=1, block=False) as pool: conn1 = pool._get_conn() # pool.pool is empty because we popped the one None that pool.pool was initialized with # but this pool._get_conn call will not raise EmptyPoolError because block is False conn2 = pool._get_conn() with patch.object(conn1, "close") as conn1_close: with patch.object(conn2, "close") as conn2_close: pool._put_conn(conn1) pool._put_conn(conn2) assert conn1_close.called is False assert conn2_close.called is True assert conn1 == pool._get_conn() assert conn2 != pool._get_conn() assert pool.num_connections == 3 assert "Connection pool is full, discarding connection" in caplog.text assert "Connection pool size: 1" in caplog.text def test_put_conn_when_pool_is_full_blocking(self) -> None: """ If maxsize = n and we _put_conn n + 1 conns, the n + 1th conn will cause a FullPoolError. 
""" with HTTPConnectionPool(host="localhost", maxsize=1, block=True) as pool: conn1 = pool._get_conn() conn2 = pool._new_conn() with patch.object(conn1, "close") as conn1_close: with patch.object(conn2, "close") as conn2_close: pool._put_conn(conn1) with pytest.raises(FullPoolError): pool._put_conn(conn2) assert conn1_close.called is False assert conn2_close.called is True assert conn1 == pool._get_conn() def test_put_conn_closed_pool(self) -> None: with HTTPConnectionPool(host="localhost", maxsize=1, block=True) as pool: conn1 = pool._get_conn() with patch.object(conn1, "close") as conn1_close: pool.close() assert pool.pool is None # Accessing pool.pool will raise AttributeError, which will get # caught and will close conn1 pool._put_conn(conn1) assert conn1_close.called is True def test_exception_str(self) -> None: assert ( str(EmptyPoolError(HTTPConnectionPool(host="localhost"), "Test.")) == "HTTPConnectionPool(host='localhost', port=None): Test." ) def test_retry_exception_str(self) -> None: assert ( str(MaxRetryError(HTTPConnectionPool(host="localhost"), "Test.", None)) == "HTTPConnectionPool(host='localhost', port=None): " "Max retries exceeded with url: Test. (Caused by None)" ) err = SocketError("Test") # using err.__class__ here, as socket.error is an alias for OSError # since Py3.3 and gets printed as this assert ( str(MaxRetryError(HTTPConnectionPool(host="localhost"), "Test.", err)) == "HTTPConnectionPool(host='localhost', port=None): " "Max retries exceeded with url: Test. 
" "(Caused by %r)" % err ) def test_pool_size(self) -> None: POOL_SIZE = 1 with HTTPConnectionPool( host="localhost", maxsize=POOL_SIZE, block=True ) as pool: def _test( exception: type[BaseException], expect: type[BaseException], reason: type[BaseException] | None = None, ) -> None: with patch.object(pool, "_make_request", side_effect=exception()): with pytest.raises(expect) as excinfo: pool.request("GET", "/") if reason is not None: assert isinstance(excinfo.value.reason, reason) # type: ignore[attr-defined] assert pool.pool is not None assert pool.pool.qsize() == POOL_SIZE # Make sure that all of the exceptions return the connection # to the pool _test(BaseSSLError, MaxRetryError, SSLError) _test(CertificateError, MaxRetryError, SSLError) # The pool should never be empty, and with these two exceptions # being raised, a retry will be triggered, but that retry will # fail, eventually raising MaxRetryError, not EmptyPoolError # See: https://github.com/urllib3/urllib3/issues/76 with patch.object(pool, "_make_request", side_effect=HTTPException()): with pytest.raises(MaxRetryError): pool.request("GET", "/", retries=1, pool_timeout=SHORT_TIMEOUT) assert pool.pool is not None assert pool.pool.qsize() == POOL_SIZE def test_empty_does_not_put_conn(self) -> None: """Do not put None back in the pool if the pool was empty""" with HTTPConnectionPool(host="localhost", maxsize=1, block=True) as pool: with patch.object( pool, "_get_conn", side_effect=EmptyPoolError(pool, "Pool is empty") ): with patch.object( pool, "_put_conn", side_effect=AssertionError("Unexpected _put_conn"), ): with pytest.raises(EmptyPoolError): pool.request("GET", "/") def test_assert_same_host(self) -> None: with connection_from_url("http://google.com:80") as c: with pytest.raises(HostChangedError): c.request("GET", "http://yahoo.com:80", assert_same_host=True) def test_pool_close(self) -> None: pool = connection_from_url("http://google.com:80") # Populate with some connections conn1 = pool._get_conn() 
conn2 = pool._get_conn() conn3 = pool._get_conn() pool._put_conn(conn1) pool._put_conn(conn2) old_pool_queue = pool.pool pool.close() assert pool.pool is None with pytest.raises(ClosedPoolError): pool._get_conn() pool._put_conn(conn3) with pytest.raises(ClosedPoolError): pool._get_conn() with pytest.raises(Empty): assert old_pool_queue is not None old_pool_queue.get(block=False) def test_pool_close_twice(self) -> None: pool = connection_from_url("http://google.com:80") # Populate with some connections conn1 = pool._get_conn() conn2 = pool._get_conn() pool._put_conn(conn1) pool._put_conn(conn2) pool.close() assert pool.pool is None try: pool.close() except AttributeError: pytest.fail("Pool of the ConnectionPool is None and has no attribute get.") def test_pool_timeouts(self) -> None: with HTTPConnectionPool(host="localhost") as pool: conn = pool._new_conn() assert conn.__class__ == HTTPConnection assert pool.timeout.__class__ == Timeout assert pool.timeout._read == _DEFAULT_TIMEOUT assert pool.timeout._connect == _DEFAULT_TIMEOUT assert pool.timeout.total is None pool = HTTPConnectionPool(host="localhost", timeout=SHORT_TIMEOUT) assert pool.timeout._read == SHORT_TIMEOUT assert pool.timeout._connect == SHORT_TIMEOUT assert pool.timeout.total is None def test_no_host(self) -> None: with pytest.raises(LocationValueError): HTTPConnectionPool(None) # type: ignore[arg-type] def test_contextmanager(self) -> None: with connection_from_url("http://google.com:80") as pool: # Populate with some connections conn1 = pool._get_conn() conn2 = pool._get_conn() conn3 = pool._get_conn() pool._put_conn(conn1) pool._put_conn(conn2) old_pool_queue = pool.pool assert pool.pool is None with pytest.raises(ClosedPoolError): pool._get_conn() pool._put_conn(conn3) with pytest.raises(ClosedPoolError): pool._get_conn() with pytest.raises(Empty): assert old_pool_queue is not None old_pool_queue.get(block=False) def test_url_from_pool(self) -> None: with 
connection_from_url("http://google.com:80") as pool: path = "path?query=foo" assert f"http://google.com:80/{path}" == _url_from_pool(pool, path) def test_ca_certs_default_cert_required(self) -> None: with connection_from_url("https://google.com:80", ca_certs=DEFAULT_CA) as pool: conn = pool._get_conn() assert conn.cert_reqs == ssl.CERT_REQUIRED # type: ignore[attr-defined] def test_cleanup_on_extreme_connection_error(self) -> None: """ This test validates that we clean up properly even on exceptions that we'd not otherwise catch, i.e. those that inherit from BaseException like KeyboardInterrupt or gevent.Timeout. See #805 for more details. """ class RealBad(BaseException): pass def kaboom(*args: typing.Any, **kwargs: typing.Any) -> None: raise RealBad() with connection_from_url("http://localhost:80") as c: with patch.object(c, "_make_request", kaboom): assert c.pool is not None initial_pool_size = c.pool.qsize() try: # We need to release_conn this way or we'd put it away # regardless. c.urlopen("GET", "/", release_conn=False) except RealBad: pass new_pool_size = c.pool.qsize() assert initial_pool_size == new_pool_size def test_release_conn_param_is_respected_after_http_error_retry(self) -> None: """For successful ```urlopen(release_conn=False)```, the connection isn't released, even after a retry. This is a regression test for issue #651 [1], where the connection would be released if the initial request failed, even if a retry succeeded. [1] <https://github.com/urllib3/urllib3/issues/651> """ class _raise_once_make_request_function: """Callable that can mimic `_make_request()`. Raises the given exception on its first call, but returns a successful response on subsequent calls. 
""" def __init__( self, ex: type[BaseException], pool: HTTPConnectionPool ) -> None: super().__init__() self._ex: type[BaseException] | None = ex self._pool = pool def __call__( self, conn: HTTPConnection, method: str, url: str, *args: typing.Any, retries: Retry, **kwargs: typing.Any, ) -> HTTPResponse: if self._ex: ex, self._ex = self._ex, None raise ex() httplib_response = httplib.HTTPResponse(MockSock) # type: ignore[arg-type] httplib_response.fp = MockChunkedEncodingResponse([b"f", b"o", b"o"]) # type: ignore[assignment] httplib_response.headers = httplib_response.msg = httplib.HTTPMessage() response_conn: HTTPConnection | None = kwargs.get("response_conn") response = HTTPResponse( body=httplib_response, headers=httplib_response.headers, # type: ignore[arg-type] status=httplib_response.status, version=httplib_response.version, reason=httplib_response.reason, original_response=httplib_response, retries=retries, request_method=method, request_url=url, preload_content=False, connection=response_conn, pool=self._pool, ) return response def _test(exception: type[BaseException]) -> None: with HTTPConnectionPool(host="localhost", maxsize=1, block=True) as pool: # Verify that the request succeeds after two attempts, and that the # connection is left on the response object, instead of being # released back into the pool. with patch.object( pool, "_make_request", _raise_once_make_request_function(exception, pool), ): response = pool.urlopen( "GET", "/", retries=1, release_conn=False, preload_content=False, chunked=True, ) assert pool.pool is not None assert pool.pool.qsize() == 0 assert pool.num_connections == 2 assert response.connection is not None response.release_conn() assert pool.pool.qsize() == 1 assert response.connection is None # Run the test case for all the retriable exceptions. 
_test(TimeoutError) _test(HTTPException) _test(SocketError) _test(ProtocolError) def test_read_timeout_0_does_not_raise_bad_status_line_error(self) -> None: with HTTPConnectionPool(host="localhost", maxsize=1) as pool: conn = Mock(spec=HTTPConnection) # Needed to tell the pool that the connection is alive. conn.is_closed = False with patch.object(Timeout, "read_timeout", 0): timeout = Timeout(1, 1, 1) with pytest.raises(ReadTimeoutError): pool._make_request(conn, "", "", timeout=timeout) test_queue_monkeypatch.py 0000644 00000001371 15025234504 0011705 0 ustar 00 from __future__ import annotations import queue from unittest import mock import pytest from urllib3 import HTTPConnectionPool from urllib3.exceptions import EmptyPoolError class BadError(Exception): """ This should not be raised. """ class TestMonkeypatchResistance: """ Test that connection pool works even with a monkey patched Queue module, see obspy/obspy#1599, psf/requests#3742, urllib3/urllib3#1061. """ def test_queue_monkeypatching(self) -> None: with mock.patch.object(queue, "Empty", BadError): with HTTPConnectionPool(host="localhost", block=True) as http: http._get_conn() with pytest.raises(EmptyPoolError): http._get_conn(timeout=0) test_wait.py 0000644 00000013557 15025234504 0007134 0 ustar 00 from __future__ import annotations import signal import threading import time import typing from socket import socket, socketpair from types import FrameType import pytest from urllib3.util.wait import ( _have_working_poll, poll_wait_for_socket, select_wait_for_socket, wait_for_read, wait_for_socket, wait_for_write, ) TYPE_SOCKET_PAIR = typing.Tuple[socket, socket] TYPE_WAIT_FOR = typing.Callable[..., bool] @pytest.fixture def spair() -> typing.Generator[TYPE_SOCKET_PAIR, None, None]: a, b = socketpair() yield a, b a.close() b.close() variants: list[TYPE_WAIT_FOR] = [wait_for_socket, select_wait_for_socket] if _have_working_poll(): variants.append(poll_wait_for_socket) @pytest.mark.parametrize("wfs", 
variants) def test_wait_for_socket(wfs: TYPE_WAIT_FOR, spair: TYPE_SOCKET_PAIR) -> None: a, b = spair with pytest.raises(RuntimeError): wfs(a, read=False, write=False) assert not wfs(a, read=True, timeout=0) assert wfs(a, write=True, timeout=0) b.send(b"x") assert wfs(a, read=True, timeout=0) assert wfs(a, read=True, timeout=10) assert wfs(a, read=True, timeout=None) # Fill up the socket with data a.setblocking(False) try: while True: a.send(b"x" * 999999) except OSError: pass # Now it's not writable anymore assert not wfs(a, write=True, timeout=0) # But if we ask for read-or-write, that succeeds assert wfs(a, read=True, write=True, timeout=0) # Unless we read from it assert a.recv(1) == b"x" assert not wfs(a, read=True, write=True, timeout=0) # But if the remote peer closes the socket, then it becomes readable b.close() assert wfs(a, read=True, timeout=0) # Waiting for a socket that's actually been closed is just a bug, and # raises some kind of helpful exception (exact details depend on the # platform). 
with pytest.raises(Exception): wfs(b, read=True) def test_wait_for_read_write(spair: TYPE_SOCKET_PAIR) -> None: a, b = spair assert not wait_for_read(a, 0) assert wait_for_write(a, 0) b.send(b"x") assert wait_for_read(a, 0) assert wait_for_write(a, 0) # Fill up the socket with data a.setblocking(False) try: while True: a.send(b"x" * 999999) except OSError: pass # Now it's not writable anymore assert not wait_for_write(a, 0) @pytest.mark.skipif(not hasattr(signal, "setitimer"), reason="need setitimer() support") @pytest.mark.parametrize("wfs", variants) def test_eintr(wfs: TYPE_WAIT_FOR, spair: TYPE_SOCKET_PAIR) -> None: a, b = spair interrupt_count = [0] def handler(sig: int, frame: FrameType | None) -> typing.Any: assert sig == signal.SIGALRM interrupt_count[0] += 1 old_handler = signal.signal(signal.SIGALRM, handler) try: assert not wfs(a, read=True, timeout=0) start = time.monotonic() try: # Start delivering SIGALRM 10 times per second signal.setitimer(signal.ITIMER_REAL, 0.1, 0.1) # Sleep for 1 second (we hope!) wfs(a, read=True, timeout=1) finally: # Stop delivering SIGALRM signal.setitimer(signal.ITIMER_REAL, 0) end = time.monotonic() dur = end - start assert 0.9 < dur < 3 finally: signal.signal(signal.SIGALRM, old_handler) assert interrupt_count[0] > 0 @pytest.mark.skipif(not hasattr(signal, "setitimer"), reason="need setitimer() support") @pytest.mark.parametrize("wfs", variants) def test_eintr_zero_timeout(wfs: TYPE_WAIT_FOR, spair: TYPE_SOCKET_PAIR) -> None: a, b = spair interrupt_count = [0] def handler(sig: int, frame: FrameType | None) -> typing.Any: assert sig == signal.SIGALRM interrupt_count[0] += 1 old_handler = signal.signal(signal.SIGALRM, handler) try: assert not wfs(a, read=True, timeout=0) try: # Start delivering SIGALRM 1000 times per second, # to trigger race conditions such as # https://github.com/urllib3/urllib3/issues/1396. signal.setitimer(signal.ITIMER_REAL, 0.001, 0.001) # Hammer the system call for a while to trigger the # race. 
end = time.monotonic() + 5 for i in range(100000): wfs(a, read=True, timeout=0) if time.monotonic() >= end: break finally: # Stop delivering SIGALRM signal.setitimer(signal.ITIMER_REAL, 0) finally: signal.signal(signal.SIGALRM, old_handler) assert interrupt_count[0] > 0 @pytest.mark.skipif(not hasattr(signal, "setitimer"), reason="need setitimer() support") @pytest.mark.parametrize("wfs", variants) def test_eintr_infinite_timeout(wfs: TYPE_WAIT_FOR, spair: TYPE_SOCKET_PAIR) -> None: a, b = spair interrupt_count = [0] def handler(sig: int, frame: FrameType | None) -> typing.Any: assert sig == signal.SIGALRM interrupt_count[0] += 1 def make_a_readable_after_one_second() -> None: time.sleep(1) b.send(b"x") old_handler = signal.signal(signal.SIGALRM, handler) try: assert not wfs(a, read=True, timeout=0) start = time.monotonic() try: # Start delivering SIGALRM 10 times per second signal.setitimer(signal.ITIMER_REAL, 0.1, 0.1) # Sleep for 1 second (we hope!) thread = threading.Thread(target=make_a_readable_after_one_second) thread.start() wfs(a, read=True) finally: # Stop delivering SIGALRM signal.setitimer(signal.ITIMER_REAL, 0) thread.join() end = time.monotonic() dur = end - start assert 0.9 < dur < 3 finally: signal.signal(signal.SIGALRM, old_handler) assert interrupt_count[0] > 0 with_dummyserver/test_poolmanager.py 0000644 00000060763 15025234504 0014112 0 ustar 00 from __future__ import annotations import gzip import typing from test import LONG_TIMEOUT from unittest import mock import pytest from dummyserver.socketserver import HAS_IPV6 from dummyserver.testcase import ( HypercornDummyServerTestCase, IPv6HypercornDummyServerTestCase, ) from urllib3 import HTTPHeaderDict, HTTPResponse, request from urllib3.connectionpool import port_by_scheme from urllib3.exceptions import MaxRetryError, URLSchemeUnknown from urllib3.poolmanager import PoolManager from urllib3.util.retry import Retry class TestPoolManager(HypercornDummyServerTestCase): @classmethod def 
setup_class(cls) -> None: super().setup_class() cls.base_url = f"http://{cls.host}:{cls.port}" cls.base_url_alt = f"http://{cls.host_alt}:{cls.port}" def test_redirect(self) -> None: with PoolManager() as http: r = http.request( "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url}/"}, redirect=False, ) assert r.status == 303 r = http.request( "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url}/"}, ) assert r.status == 200 assert r.data == b"Dummy server!" def test_redirect_twice(self) -> None: with PoolManager() as http: r = http.request( "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url}/redirect"}, redirect=False, ) assert r.status == 303 r = http.request( "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url}/redirect?target={self.base_url}/"}, ) assert r.status == 200 assert r.data == b"Dummy server!" def test_redirect_to_relative_url(self) -> None: with PoolManager() as http: r = http.request( "GET", f"{self.base_url}/redirect", fields={"target": "/redirect"}, redirect=False, ) assert r.status == 303 r = http.request( "GET", f"{self.base_url}/redirect", fields={"target": "/redirect"} ) assert r.status == 200 assert r.data == b"Dummy server!" 
def test_cross_host_redirect(self) -> None: with PoolManager() as http: cross_host_location = f"{self.base_url_alt}/echo?a=b" with pytest.raises(MaxRetryError): http.request( "GET", f"{self.base_url}/redirect", fields={"target": cross_host_location}, timeout=LONG_TIMEOUT, retries=0, ) r = http.request( "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url_alt}/echo?a=b"}, timeout=LONG_TIMEOUT, retries=1, ) assert isinstance(r, HTTPResponse) assert r._pool is not None assert r._pool.host == self.host_alt def test_too_many_redirects(self) -> None: with PoolManager() as http: with pytest.raises(MaxRetryError): http.request( "GET", f"{self.base_url}/redirect", fields={ "target": f"{self.base_url}/redirect?target={self.base_url}/" }, retries=1, preload_content=False, ) with pytest.raises(MaxRetryError): http.request( "GET", f"{self.base_url}/redirect", fields={ "target": f"{self.base_url}/redirect?target={self.base_url}/" }, retries=Retry(total=None, redirect=1), preload_content=False, ) # Even with preload_content=False and raise on redirects, we reused the same # connection assert len(http.pools) == 1 pool = http.connection_from_host(self.host, self.port) assert pool.num_connections == 1 def test_redirect_cross_host_remove_headers(self) -> None: with PoolManager() as http: r = http.request( "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url_alt}/headers"}, headers={ "Authorization": "foo", "Proxy-Authorization": "bar", "Cookie": "foo=bar", }, ) assert r.status == 200 data = r.json() assert "Authorization" not in data assert "Proxy-Authorization" not in data assert "Cookie" not in data r = http.request( "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url_alt}/headers"}, headers={ "authorization": "foo", "proxy-authorization": "baz", "cookie": "foo=bar", }, ) assert r.status == 200 data = r.json() assert "authorization" not in data assert "Authorization" not in data assert "proxy-authorization" not in data assert 
"Proxy-Authorization" not in data assert "cookie" not in data assert "Cookie" not in data def test_redirect_cross_host_no_remove_headers(self) -> None: with PoolManager() as http: r = http.request( "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url_alt}/headers"}, headers={ "Authorization": "foo", "Proxy-Authorization": "bar", "Cookie": "foo=bar", }, retries=Retry(remove_headers_on_redirect=[]), ) assert r.status == 200 data = r.json() assert data["Authorization"] == "foo" assert data["Proxy-Authorization"] == "bar" assert data["Cookie"] == "foo=bar" def test_redirect_cross_host_set_removed_headers(self) -> None: with PoolManager() as http: r = http.request( "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url_alt}/headers"}, headers={ "X-API-Secret": "foo", "Authorization": "bar", "Proxy-Authorization": "baz", "Cookie": "foo=bar", }, retries=Retry(remove_headers_on_redirect=["X-API-Secret"]), ) assert r.status == 200 data = r.json() assert "X-API-Secret" not in data assert data["Authorization"] == "bar" assert data["Proxy-Authorization"] == "baz" assert data["Cookie"] == "foo=bar" headers = { "x-api-secret": "foo", "authorization": "bar", "proxy-authorization": "baz", "cookie": "foo=bar", } r = http.request( "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url_alt}/headers"}, headers=headers, retries=Retry(remove_headers_on_redirect=["X-API-Secret"]), ) assert r.status == 200 data = r.json() assert "x-api-secret" not in data assert "X-API-Secret" not in data assert data["Authorization"] == "bar" assert data["Proxy-Authorization"] == "baz" assert data["Cookie"] == "foo=bar" # Ensure the header argument itself is not modified in-place. 
assert headers == { "x-api-secret": "foo", "authorization": "bar", "proxy-authorization": "baz", "cookie": "foo=bar", } def test_redirect_without_preload_releases_connection(self) -> None: with PoolManager(block=True, maxsize=2) as http: r = http.request("GET", f"{self.base_url}/redirect", preload_content=False) assert isinstance(r, HTTPResponse) assert r._pool is not None assert r._pool.num_requests == 2 assert r._pool.num_connections == 1 assert len(http.pools) == 1 def test_303_redirect_makes_request_lose_body(self) -> None: with PoolManager() as http: response = http.request( "POST", f"{self.base_url}/redirect", fields={ "target": f"{self.base_url}/headers_and_params", "status": "303 See Other", }, ) data = response.json() assert data["params"] == {} assert "Content-Type" not in HTTPHeaderDict(data["headers"]) def test_unknown_scheme(self) -> None: with PoolManager() as http: unknown_scheme = "unknown" unknown_scheme_url = f"{unknown_scheme}://host" with pytest.raises(URLSchemeUnknown) as e: r = http.request("GET", unknown_scheme_url) assert e.value.scheme == unknown_scheme r = http.request( "GET", f"{self.base_url}/redirect", fields={"target": unknown_scheme_url}, redirect=False, ) assert r.status == 303 assert r.headers.get("Location") == unknown_scheme_url with pytest.raises(URLSchemeUnknown) as e: r = http.request( "GET", f"{self.base_url}/redirect", fields={"target": unknown_scheme_url}, ) assert e.value.scheme == unknown_scheme def test_raise_on_redirect(self) -> None: with PoolManager() as http: r = http.request( "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url}/redirect?target={self.base_url}/"}, retries=Retry(total=None, redirect=1, raise_on_redirect=False), ) assert r.status == 303 def test_raise_on_status(self) -> None: with PoolManager() as http: with pytest.raises(MaxRetryError): # the default is to raise r = http.request( "GET", f"{self.base_url}/status", fields={"status": "500 Internal Server Error"}, retries=Retry(total=1, 
status_forcelist=range(500, 600)), ) with pytest.raises(MaxRetryError): # raise explicitly r = http.request( "GET", f"{self.base_url}/status", fields={"status": "500 Internal Server Error"}, retries=Retry( total=1, status_forcelist=range(500, 600), raise_on_status=True ), ) # don't raise r = http.request( "GET", f"{self.base_url}/status", fields={"status": "500 Internal Server Error"}, retries=Retry( total=1, status_forcelist=range(500, 600), raise_on_status=False ), ) assert r.status == 500 def test_missing_port(self) -> None: # Can a URL that lacks an explicit port like ':80' succeed, or # will all such URLs fail with an error? with PoolManager() as http: # By globally adjusting `port_by_scheme` we pretend for a moment # that HTTP's default port is not 80, but is the port at which # our test server happens to be listening. port_by_scheme["http"] = self.port try: r = http.request("GET", f"http://{self.host}/", retries=0) finally: port_by_scheme["http"] = 80 assert r.status == 200 assert r.data == b"Dummy server!" 
def test_headers(self) -> None: with PoolManager(headers={"Foo": "bar"}) as http: r = http.request("GET", f"{self.base_url}/headers") returned_headers = r.json() assert returned_headers.get("Foo") == "bar" r = http.request("POST", f"{self.base_url}/headers") returned_headers = r.json() assert returned_headers.get("Foo") == "bar" r = http.request_encode_url("GET", f"{self.base_url}/headers") returned_headers = r.json() assert returned_headers.get("Foo") == "bar" r = http.request_encode_body("POST", f"{self.base_url}/headers") returned_headers = r.json() assert returned_headers.get("Foo") == "bar" r = http.request_encode_url( "GET", f"{self.base_url}/headers", headers={"Baz": "quux"} ) returned_headers = r.json() assert returned_headers.get("Foo") is None assert returned_headers.get("Baz") == "quux" r = http.request_encode_body( "GET", f"{self.base_url}/headers", headers={"Baz": "quux"} ) returned_headers = r.json() assert returned_headers.get("Foo") is None assert returned_headers.get("Baz") == "quux" def test_headers_http_header_dict(self) -> None: # Test uses a list of headers to assert the order # that headers are sent in the request too. 
headers = HTTPHeaderDict() headers.add("Foo", "bar") headers.add("Multi", "1") headers.add("Baz", "quux") headers.add("Multi", "2") with PoolManager(headers=headers) as http: r = http.request("GET", f"{self.base_url}/multi_headers") returned_headers = r.json()["headers"] assert returned_headers[-4:] == [ ["Foo", "bar"], ["Multi", "1"], ["Multi", "2"], ["Baz", "quux"], ] r = http.request( "GET", f"{self.base_url}/multi_headers", headers={ **headers, "Extra": "extra", "Foo": "new", }, ) returned_headers = r.json()["headers"] assert returned_headers[-4:] == [ ["Foo", "new"], ["Multi", "1, 2"], ["Baz", "quux"], ["Extra", "extra"], ] def test_merge_headers_with_pool_manager_headers(self) -> None: headers = HTTPHeaderDict() headers.add("Cookie", "choc-chip") headers.add("Cookie", "oatmeal-raisin") orig = headers.copy() added_headers = {"Cookie": "tim-tam"} with PoolManager(headers=headers) as http: r = http.request( "GET", f"{self.base_url}/multi_headers", headers=typing.cast(HTTPHeaderDict, http.headers) | added_headers, ) returned_headers = r.json()["headers"] assert returned_headers[-3:] == [ ["Cookie", "choc-chip"], ["Cookie", "oatmeal-raisin"], ["Cookie", "tim-tam"], ] # make sure the pool headers weren't modified assert http.headers == orig def test_headers_http_multi_header_multipart(self) -> None: headers = HTTPHeaderDict() headers.add("Multi", "1") headers.add("Multi", "2") old_headers = headers.copy() with PoolManager(headers=headers) as http: r = http.request( "POST", f"{self.base_url}/multi_headers", fields={"k": "v"}, multipart_boundary="b", encode_multipart=True, ) returned_headers = r.json()["headers"] assert returned_headers[5:] == [ ["Multi", "1"], ["Multi", "2"], ["Content-Type", "multipart/form-data; boundary=b"], ] # Assert that the previous headers weren't modified. 
assert headers == old_headers # Set a default value for the Content-Type headers["Content-Type"] = "multipart/form-data; boundary=b; field=value" r = http.request( "POST", f"{self.base_url}/multi_headers", fields={"k": "v"}, multipart_boundary="b", encode_multipart=True, ) returned_headers = r.json()["headers"] assert returned_headers[5:] == [ ["Multi", "1"], ["Multi", "2"], # Uses the set value, not the one that would be generated. ["Content-Type", "multipart/form-data; boundary=b; field=value"], ] def test_body(self) -> None: with PoolManager() as http: r = http.request("POST", f"{self.base_url}/echo", body=b"test") assert r.data == b"test" def test_http_with_ssl_keywords(self) -> None: with PoolManager(ca_certs="REQUIRED") as http: r = http.request("GET", f"http://{self.host}:{self.port}/") assert r.status == 200 def test_http_with_server_hostname(self) -> None: with PoolManager(server_hostname="example.com") as http: r = http.request("GET", f"http://{self.host}:{self.port}/") assert r.status == 200 def test_http_with_ca_cert_dir(self) -> None: with PoolManager(ca_certs="REQUIRED", ca_cert_dir="/nosuchdir") as http: r = http.request("GET", f"http://{self.host}:{self.port}/") assert r.status == 200 @pytest.mark.parametrize( ["target", "expected_target"], [ # annoyingly quart.request.full_path adds a stray `?` ("/echo_uri", b"/echo_uri?"), ("/echo_uri?q=1#fragment", b"/echo_uri?q=1"), ("/echo_uri?#", b"/echo_uri?"), ("/echo_uri#!", b"/echo_uri?"), ("/echo_uri#!#", b"/echo_uri?"), ("/echo_uri??#", b"/echo_uri??"), ("/echo_uri?%3f#", b"/echo_uri?%3F"), ("/echo_uri?%3F#", b"/echo_uri?%3F"), ("/echo_uri?[]", b"/echo_uri?%5B%5D"), ], ) def test_encode_http_target(self, target: str, expected_target: bytes) -> None: with PoolManager() as http: url = f"http://{self.host}:{self.port}{target}" r = http.request("GET", url) assert r.data == expected_target def test_top_level_request(self) -> None: r = request("GET", f"{self.base_url}/") assert r.status == 200 assert r.data == 
b"Dummy server!" def test_top_level_request_without_keyword_args(self) -> None: body = "" with pytest.raises(TypeError): request("GET", f"{self.base_url}/", body) # type: ignore[misc] def test_top_level_request_with_body(self) -> None: r = request("POST", f"{self.base_url}/echo", body=b"test") assert r.status == 200 assert r.data == b"test" def test_top_level_request_with_preload_content(self) -> None: r = request("GET", f"{self.base_url}/echo", preload_content=False) assert r.status == 200 assert r.connection is not None r.data assert r.connection is None def test_top_level_request_with_decode_content(self) -> None: r = request( "GET", f"{self.base_url}/encodingrequest", headers={"accept-encoding": "gzip"}, decode_content=False, ) assert r.status == 200 assert gzip.decompress(r.data) == b"hello, world!" r = request( "GET", f"{self.base_url}/encodingrequest", headers={"accept-encoding": "gzip"}, decode_content=True, ) assert r.status == 200 assert r.data == b"hello, world!" def test_top_level_request_with_redirect(self) -> None: r = request( "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url}/"}, redirect=False, ) assert r.status == 303 r = request( "GET", f"{self.base_url}/redirect", fields={"target": f"{self.base_url}/"}, redirect=True, ) assert r.status == 200 assert r.data == b"Dummy server!" 
def test_top_level_request_with_retries(self) -> None: r = request("GET", f"{self.base_url}/redirect", retries=False) assert r.status == 303 r = request("GET", f"{self.base_url}/redirect", retries=3) assert r.status == 200 def test_top_level_request_with_timeout(self) -> None: with mock.patch("urllib3.poolmanager.RequestMethods.request") as mockRequest: mockRequest.return_value = HTTPResponse(status=200) r = request("GET", f"{self.base_url}/redirect", timeout=2.5) assert r.status == 200 mockRequest.assert_called_with( "GET", f"{self.base_url}/redirect", body=None, fields=None, headers=None, preload_content=True, decode_content=True, redirect=True, retries=None, timeout=2.5, json=None, ) @pytest.mark.parametrize( "headers", [ None, {"content-Type": "application/json"}, {"content-Type": "text/plain"}, {"attribute": "value", "CONTENT-TYPE": "application/json"}, HTTPHeaderDict(cookie="foo, bar"), ], ) def test_request_with_json(self, headers: HTTPHeaderDict) -> None: old_headers = None if headers is None else headers.copy() body = {"attribute": "value"} r = request( method="POST", url=f"{self.base_url}/echo_json", headers=headers, json=body ) assert r.status == 200 assert r.json() == body content_type = HTTPHeaderDict(old_headers).get( "Content-Type", "application/json" ) assert content_type in r.headers["Content-Type"].replace(" ", "").split(",") # Ensure the header argument itself is not modified in-place. 
assert headers == old_headers def test_top_level_request_with_json_with_httpheaderdict(self) -> None: body = {"attribute": "value"} header = HTTPHeaderDict(cookie="foo, bar") with PoolManager(headers=header) as http: r = http.request(method="POST", url=f"{self.base_url}/echo_json", json=body) assert r.status == 200 assert r.json() == body assert "application/json" in r.headers["Content-Type"].replace( " ", "" ).split(",") def test_top_level_request_with_body_and_json(self) -> None: match = "request got values for both 'body' and 'json' parameters which are mutually exclusive" with pytest.raises(TypeError, match=match): body = {"attribute": "value"} request(method="POST", url=f"{self.base_url}/echo", body="", json=body) def test_top_level_request_with_invalid_body(self) -> None: class BadBody: def __repr__(self) -> str: return "<BadBody>" with pytest.raises(TypeError) as e: request( method="POST", url=f"{self.base_url}/echo", body=BadBody(), # type: ignore[arg-type] ) assert str(e.value) == ( "'body' must be a bytes-like object, file-like " "object, or iterable. 
Instead was <BadBody>" ) @pytest.mark.skipif(not HAS_IPV6, reason="IPv6 is not supported on this system") class TestIPv6PoolManager(IPv6HypercornDummyServerTestCase): @classmethod def setup_class(cls) -> None: super().setup_class() cls.base_url = f"http://[{cls.host}]:{cls.port}" def test_ipv6(self) -> None: with PoolManager() as http: http.request("GET", self.base_url) with_dummyserver/test_connection.py 0000644 00000010641 15025234504 0013733 0 ustar 00 from __future__ import annotations import contextlib import sys import typing from http.client import ResponseNotReady from unittest import mock import pytest from dummyserver.testcase import HypercornDummyServerTestCase as server from urllib3 import HTTPConnectionPool from urllib3.response import HTTPResponse @pytest.fixture() def pool() -> typing.Generator[HTTPConnectionPool, None, None]: server.setup_class() with HTTPConnectionPool(server.host, server.port) as pool: yield pool server.teardown_class() def test_returns_urllib3_HTTPResponse(pool: HTTPConnectionPool) -> None: with contextlib.closing(pool._get_conn()) as conn: conn.request("GET", "/") response = conn.getresponse() assert isinstance(response, HTTPResponse) @pytest.mark.skipif(not hasattr(sys, "audit"), reason="requires python 3.8+") @mock.patch("urllib3.connection.sys.audit") def test_audit_event(audit_mock: mock.Mock, pool: HTTPConnectionPool) -> None: with contextlib.closing(pool._get_conn()) as conn: conn.request("GET", "/") audit_mock.assert_any_call("http.client.connect", conn, conn.host, conn.port) # Ensure the event is raised only once. 
connect_events = [ call for call in audit_mock.mock_calls if call.args[0] == "http.client.connect" ] assert len(connect_events) == 1 def test_does_not_release_conn(pool: HTTPConnectionPool) -> None: with contextlib.closing(pool._get_conn()) as conn: conn.request("GET", "/") response = conn.getresponse() response.release_conn() assert pool.pool.qsize() == 0 # type: ignore[union-attr] def test_releases_conn(pool: HTTPConnectionPool) -> None: with contextlib.closing(pool._get_conn()) as conn: conn.request("GET", "/") response = conn.getresponse() # If these variables are set by the pool # then the response can release the connection # back into the pool. response._pool = pool # type: ignore[attr-defined] response._connection = conn # type: ignore[attr-defined] response.release_conn() assert pool.pool.qsize() == 1 # type: ignore[union-attr] def test_double_getresponse(pool: HTTPConnectionPool) -> None: with contextlib.closing(pool._get_conn()) as conn: conn.request("GET", "/") _ = conn.getresponse() # Calling getrepsonse() twice should cause an error with pytest.raises(ResponseNotReady): conn.getresponse() def test_connection_state_properties(pool: HTTPConnectionPool) -> None: conn = pool._get_conn() assert conn.is_closed is True assert conn.is_connected is False assert conn.has_connected_to_proxy is False assert conn.is_verified is False assert conn.proxy_is_verified is None conn.connect() assert conn.is_closed is False assert conn.is_connected is True assert conn.has_connected_to_proxy is False assert conn.is_verified is False assert conn.proxy_is_verified is None conn.request("GET", "/") resp = conn.getresponse() assert resp.status == 200 conn.close() assert conn.is_closed is True assert conn.is_connected is False assert conn.has_connected_to_proxy is False assert conn.is_verified is False assert conn.proxy_is_verified is None def test_set_tunnel_is_reset(pool: HTTPConnectionPool) -> None: conn = pool._get_conn() assert conn.is_closed is True assert 
conn.is_connected is False assert conn.has_connected_to_proxy is False assert conn.is_verified is False assert conn.proxy_is_verified is None conn.set_tunnel(host="host", port=8080, scheme="http") assert conn._tunnel_host == "host" # type: ignore[attr-defined] assert conn._tunnel_port == 8080 # type: ignore[attr-defined] assert conn._tunnel_scheme == "http" # type: ignore[attr-defined] conn.close() assert conn._tunnel_host is None # type: ignore[attr-defined] assert conn._tunnel_port is None # type: ignore[attr-defined] assert conn._tunnel_scheme is None # type: ignore[attr-defined] def test_invalid_tunnel_scheme(pool: HTTPConnectionPool) -> None: conn = pool._get_conn() with pytest.raises(ValueError) as e: conn.set_tunnel(host="host", port=8080, scheme="socks") assert ( str(e.value) == "Invalid proxy scheme for tunneling: 'socks', must be either 'http' or 'https'" ) with_dummyserver/test_connectionpool.py 0000644 00000156167 15025234504 0014643 0 ustar 00 from __future__ import annotations import io import socket import time import typing import warnings from test import LONG_TIMEOUT, SHORT_TIMEOUT from threading import Event from unittest import mock from urllib.parse import urlencode import pytest from dummyserver.socketserver import NoIPv6Warning from dummyserver.testcase import HypercornDummyServerTestCase, SocketDummyServerTestCase from urllib3 import HTTPConnectionPool, encode_multipart_formdata from urllib3._collections import HTTPHeaderDict from urllib3.connection import _get_default_user_agent from urllib3.exceptions import ( ConnectTimeoutError, DecodeError, EmptyPoolError, MaxRetryError, NameResolutionError, NewConnectionError, ReadTimeoutError, UnrewindableBodyError, ) from urllib3.fields import _TYPE_FIELD_VALUE_TUPLE from urllib3.util import SKIP_HEADER, SKIPPABLE_HEADERS from urllib3.util.retry import RequestHistory, Retry from urllib3.util.timeout import _TYPE_TIMEOUT, Timeout from .. 
import INVALID_SOURCE_ADDRESSES, TARPIT_HOST, VALID_SOURCE_ADDRESSES from ..port_helpers import find_unused_port def wait_for_socket(ready_event: Event) -> None: ready_event.wait() ready_event.clear() class TestConnectionPoolTimeouts(SocketDummyServerTestCase): def test_timeout_float(self) -> None: block_event = Event() ready_event = self.start_basic_handler(block_send=block_event, num=2) with HTTPConnectionPool(self.host, self.port, retries=False) as pool: wait_for_socket(ready_event) with pytest.raises(ReadTimeoutError): pool.request("GET", "/", timeout=SHORT_TIMEOUT) block_event.set() # Release block # Shouldn't raise this time wait_for_socket(ready_event) block_event.set() # Pre-release block pool.request("GET", "/", timeout=LONG_TIMEOUT) def test_conn_closed(self) -> None: block_event = Event() self.start_basic_handler(block_send=block_event, num=1) with HTTPConnectionPool( self.host, self.port, timeout=SHORT_TIMEOUT, retries=False ) as pool: conn = pool._get_conn() pool._put_conn(conn) try: with pytest.raises(ReadTimeoutError): pool.urlopen("GET", "/") if not conn.is_closed: with pytest.raises(socket.error): conn.sock.recv(1024) # type: ignore[attr-defined] finally: pool._put_conn(conn) block_event.set() def test_timeout(self) -> None: # Requests should time out when expected block_event = Event() ready_event = self.start_basic_handler(block_send=block_event, num=3) # Pool-global timeout short_timeout = Timeout(read=SHORT_TIMEOUT) with HTTPConnectionPool( self.host, self.port, timeout=short_timeout, retries=False ) as pool: wait_for_socket(ready_event) block_event.clear() with pytest.raises(ReadTimeoutError): pool.request("GET", "/") block_event.set() # Release request # Request-specific timeouts should raise errors with HTTPConnectionPool( self.host, self.port, timeout=short_timeout, retries=False ) as pool: wait_for_socket(ready_event) now = time.time() with pytest.raises(ReadTimeoutError): pool.request("GET", "/", timeout=LONG_TIMEOUT) delta = time.time() 
- now message = "timeout was pool-level SHORT_TIMEOUT rather than request-level LONG_TIMEOUT" assert delta >= (LONG_TIMEOUT - 1e-5), message block_event.set() # Release request # Timeout passed directly to request should raise a request timeout wait_for_socket(ready_event) with pytest.raises(ReadTimeoutError): pool.request("GET", "/", timeout=SHORT_TIMEOUT) block_event.set() # Release request def test_connect_timeout(self) -> None: url = "/" host, port = TARPIT_HOST, 80 timeout = Timeout(connect=SHORT_TIMEOUT) # Pool-global timeout with HTTPConnectionPool(host, port, timeout=timeout) as pool: conn = pool._get_conn() with pytest.raises(ConnectTimeoutError): pool._make_request(conn, "GET", url) # Retries retries = Retry(connect=0) with pytest.raises(MaxRetryError): pool.request("GET", url, retries=retries) # Request-specific connection timeouts big_timeout = Timeout(read=LONG_TIMEOUT, connect=LONG_TIMEOUT) with HTTPConnectionPool(host, port, timeout=big_timeout, retries=False) as pool: conn = pool._get_conn() with pytest.raises(ConnectTimeoutError): pool._make_request(conn, "GET", url, timeout=timeout) pool._put_conn(conn) with pytest.raises(ConnectTimeoutError): pool.request("GET", url, timeout=timeout) def test_total_applies_connect(self) -> None: host, port = TARPIT_HOST, 80 timeout = Timeout(total=None, connect=SHORT_TIMEOUT) with HTTPConnectionPool(host, port, timeout=timeout) as pool: conn = pool._get_conn() try: with pytest.raises(ConnectTimeoutError): pool._make_request(conn, "GET", "/") finally: conn.close() timeout = Timeout(connect=3, read=5, total=SHORT_TIMEOUT) with HTTPConnectionPool(host, port, timeout=timeout) as pool: conn = pool._get_conn() try: with pytest.raises(ConnectTimeoutError): pool._make_request(conn, "GET", "/") finally: conn.close() def test_total_timeout(self) -> None: block_event = Event() ready_event = self.start_basic_handler(block_send=block_event, num=2) wait_for_socket(ready_event) # This will get the socket to raise an EAGAIN on 
the read timeout = Timeout(connect=3, read=SHORT_TIMEOUT) with HTTPConnectionPool( self.host, self.port, timeout=timeout, retries=False ) as pool: with pytest.raises(ReadTimeoutError): pool.request("GET", "/") block_event.set() wait_for_socket(ready_event) block_event.clear() # The connect should succeed and this should hit the read timeout timeout = Timeout(connect=3, read=5, total=SHORT_TIMEOUT) with HTTPConnectionPool( self.host, self.port, timeout=timeout, retries=False ) as pool: with pytest.raises(ReadTimeoutError): pool.request("GET", "/") def test_create_connection_timeout(self) -> None: self.start_basic_handler(block_send=Event(), num=0) # needed for self.port timeout = Timeout(connect=SHORT_TIMEOUT, total=LONG_TIMEOUT) with HTTPConnectionPool( TARPIT_HOST, self.port, timeout=timeout, retries=False ) as pool: conn = pool._new_conn() with pytest.raises(ConnectTimeoutError): conn.connect() class TestConnectionPool(HypercornDummyServerTestCase): def test_get(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: r = pool.request("GET", "/specific_method", fields={"method": "GET"}) assert r.status == 200, r.data def test_post_url(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: r = pool.request("POST", "/specific_method", fields={"method": "POST"}) assert r.status == 200, r.data def test_urlopen_put(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: r = pool.urlopen("PUT", "/specific_method?method=PUT") assert r.status == 200, r.data def test_wrong_specific_method(self) -> None: # To make sure the dummy server is actually returning failed responses with HTTPConnectionPool(self.host, self.port) as pool: r = pool.request("GET", "/specific_method", fields={"method": "POST"}) assert r.status == 400, r.data with HTTPConnectionPool(self.host, self.port) as pool: r = pool.request("POST", "/specific_method", fields={"method": "GET"}) assert r.status == 400, r.data def test_upload(self) -> None: data = "I'm in ur 
multipart form-data, hazing a cheezburgr" fields: dict[str, _TYPE_FIELD_VALUE_TUPLE] = { "upload_param": "filefield", "upload_filename": "lolcat.txt", "filefield": ("lolcat.txt", data), } fields["upload_size"] = len(data) # type: ignore[assignment] with HTTPConnectionPool(self.host, self.port) as pool: r = pool.request("POST", "/upload", fields=fields) assert r.status == 200, r.data def test_one_name_multiple_values(self) -> None: fields = [("foo", "a"), ("foo", "b")] with HTTPConnectionPool(self.host, self.port) as pool: # urlencode r = pool.request("GET", "/echo", fields=fields) assert r.data == b"foo=a&foo=b" # multipart r = pool.request("POST", "/echo", fields=fields) assert r.data.count(b'name="foo"') == 2 def test_request_method_body(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: body = b"hi" r = pool.request("POST", "/echo", body=body) assert r.data == body fields = [("hi", "hello")] with pytest.raises(TypeError): pool.request("POST", "/echo", body=body, fields=fields) def test_unicode_upload(self) -> None: fieldname = "myfile" filename = "\xe2\x99\xa5.txt" data = "\xe2\x99\xa5".encode() size = len(data) fields: dict[str, _TYPE_FIELD_VALUE_TUPLE] = { "upload_param": fieldname, "upload_filename": filename, fieldname: (filename, data), } fields["upload_size"] = size # type: ignore[assignment] with HTTPConnectionPool(self.host, self.port) as pool: r = pool.request("POST", "/upload", fields=fields) assert r.status == 200, r.data def test_nagle(self) -> None: """Test that connections have TCP_NODELAY turned on""" # This test needs to be here in order to be run. socket.create_connection actually tries # to connect to the host provided so we need a dummyserver to be running. 
with HTTPConnectionPool(self.host, self.port) as pool: conn = pool._get_conn() try: pool._make_request(conn, "GET", "/") tcp_nodelay_setting = conn.sock.getsockopt( # type: ignore[attr-defined] socket.IPPROTO_TCP, socket.TCP_NODELAY ) assert tcp_nodelay_setting finally: conn.close() @pytest.mark.parametrize( "socket_options", [ [(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)], ((socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),), ], ) def test_socket_options(self, socket_options: tuple[int, int, int]) -> None: """Test that connections accept socket options.""" # This test needs to be here in order to be run. socket.create_connection actually tries to # connect to the host provided so we need a dummyserver to be running. with HTTPConnectionPool( self.host, self.port, socket_options=socket_options, ) as pool: # Get the socket of a new connection. s = pool._new_conn()._new_conn() # type: ignore[attr-defined] try: using_keepalive = ( s.getsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE) > 0 ) assert using_keepalive finally: s.close() @pytest.mark.parametrize("socket_options", [None, []]) def test_disable_default_socket_options( self, socket_options: list[int] | None ) -> None: """Test that passing None or empty list disables all socket options.""" # This test needs to be here in order to be run. socket.create_connection actually tries # to connect to the host provided so we need a dummyserver to be running. with HTTPConnectionPool( self.host, self.port, socket_options=socket_options ) as pool: s = pool._new_conn()._new_conn() # type: ignore[attr-defined] try: using_nagle = s.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY) == 0 assert using_nagle finally: s.close() def test_defaults_are_applied(self) -> None: """Test that modifying the default socket options works.""" # This test needs to be here in order to be run. socket.create_connection actually tries # to connect to the host provided so we need a dummyserver to be running. 
with HTTPConnectionPool(self.host, self.port) as pool: # Get the HTTPConnection instance conn = pool._new_conn() try: # Update the default socket options assert conn.socket_options is not None conn.socket_options += [(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)] # type: ignore[operator] s = conn._new_conn() # type: ignore[attr-defined] nagle_disabled = ( s.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY) > 0 ) using_keepalive = ( s.getsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE) > 0 ) assert nagle_disabled assert using_keepalive finally: conn.close() s.close() def test_connection_error_retries(self) -> None: """ECONNREFUSED error should raise a connection error, with retries""" port = find_unused_port() with HTTPConnectionPool(self.host, port) as pool: with pytest.raises(MaxRetryError) as e: pool.request("GET", "/", retries=Retry(connect=3)) assert type(e.value.reason) is NewConnectionError def test_timeout_success(self) -> None: timeout = Timeout(connect=3, read=5, total=None) with HTTPConnectionPool(self.host, self.port, timeout=timeout) as pool: pool.request("GET", "/") # This should not raise a "Timeout already started" error pool.request("GET", "/") with HTTPConnectionPool(self.host, self.port, timeout=timeout) as pool: # This should also not raise a "Timeout already started" error pool.request("GET", "/") timeout = Timeout(total=None) with HTTPConnectionPool(self.host, self.port, timeout=timeout) as pool: pool.request("GET", "/") socket_timeout_reuse_testdata = pytest.mark.parametrize( ["timeout", "expect_settimeout_calls"], [ (1, (1, 1)), (None, (None, None)), (Timeout(read=4), (None, 4)), (Timeout(read=4, connect=5), (5, 4)), (Timeout(connect=6), (6, None)), ], ) @socket_timeout_reuse_testdata def test_socket_timeout_updated_on_reuse_constructor( self, timeout: _TYPE_TIMEOUT, expect_settimeout_calls: typing.Sequence[float | None], ) -> None: with HTTPConnectionPool(self.host, self.port, timeout=timeout) as pool: # Make a request to create a new 
connection. pool.urlopen("GET", "/") # Grab the connection and mock the inner socket. assert pool.pool is not None conn = pool.pool.get_nowait() conn_sock = mock.Mock(wraps=conn.sock) conn.sock = conn_sock pool._put_conn(conn) # Assert that sock.settimeout() is called with the new connect timeout, then the read timeout. pool.urlopen("GET", "/", timeout=timeout) conn_sock.settimeout.assert_has_calls( [mock.call(x) for x in expect_settimeout_calls] ) @socket_timeout_reuse_testdata def test_socket_timeout_updated_on_reuse_parameter( self, timeout: _TYPE_TIMEOUT, expect_settimeout_calls: typing.Sequence[float | None], ) -> None: with HTTPConnectionPool(self.host, self.port) as pool: # Make a request to create a new connection. pool.urlopen("GET", "/", timeout=LONG_TIMEOUT) # Grab the connection and mock the inner socket. assert pool.pool is not None conn = pool.pool.get_nowait() conn_sock = mock.Mock(wraps=conn.sock) conn.sock = conn_sock pool._put_conn(conn) # Assert that sock.settimeout() is called with the new connect timeout, then the read timeout. 
pool.urlopen("GET", "/", timeout=timeout) conn_sock.settimeout.assert_has_calls( [mock.call(x) for x in expect_settimeout_calls] ) def test_tunnel(self) -> None: # note the actual httplib.py has no tests for this functionality timeout = Timeout(total=None) with HTTPConnectionPool(self.host, self.port, timeout=timeout) as pool: conn = pool._get_conn() try: conn.set_tunnel(self.host, self.port) with mock.patch.object( conn, "_tunnel", create=True, return_value=None ) as conn_tunnel: pool._make_request(conn, "GET", "/") conn_tunnel.assert_called_once_with() finally: conn.close() # test that it's not called when tunnel is not set timeout = Timeout(total=None) with HTTPConnectionPool(self.host, self.port, timeout=timeout) as pool: conn = pool._get_conn() try: with mock.patch.object( conn, "_tunnel", create=True, return_value=None ) as conn_tunnel: pool._make_request(conn, "GET", "/") assert not conn_tunnel.called finally: conn.close() def test_redirect_relative_url_no_deprecation(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: with warnings.catch_warnings(): warnings.simplefilter("error", DeprecationWarning) pool.request("GET", "/redirect", fields={"target": "/"}) def test_redirect(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: r = pool.request("GET", "/redirect", fields={"target": "/"}, redirect=False) assert r.status == 303 r = pool.request("GET", "/redirect", fields={"target": "/"}) assert r.status == 200 assert r.data == b"Dummy server!" 
def test_303_redirect_makes_request_lose_body(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: response = pool.request( "POST", "/redirect", fields={"target": "/headers_and_params", "status": "303 See Other"}, ) data = response.json() assert data["params"] == {} assert "Content-Type" not in HTTPHeaderDict(data["headers"]) def test_bad_connect(self) -> None: with HTTPConnectionPool("badhost.invalid", self.port) as pool: with pytest.raises(MaxRetryError) as e: pool.request("GET", "/", retries=5) assert type(e.value.reason) is NameResolutionError def test_keepalive(self) -> None: with HTTPConnectionPool(self.host, self.port, block=True, maxsize=1) as pool: r = pool.request("GET", "/keepalive?close=0") r = pool.request("GET", "/keepalive?close=0") assert r.status == 200 assert pool.num_connections == 1 assert pool.num_requests == 2 def test_keepalive_close(self) -> None: with HTTPConnectionPool( self.host, self.port, block=True, maxsize=1, timeout=2 ) as pool: r = pool.request( "GET", "/keepalive?close=1", retries=0, headers={"Connection": "close"} ) assert pool.num_connections == 1 # The dummyserver will have responded with Connection:close, # and httplib will properly cleanup the socket. # We grab the HTTPConnection object straight from the Queue, # because _get_conn() is where the check & reset occurs assert pool.pool is not None conn = pool.pool.get() assert conn.sock is None pool._put_conn(conn) # Now with keep-alive r = pool.request( "GET", "/keepalive?close=0", retries=0, headers={"Connection": "keep-alive"}, ) # The dummyserver responded with Connection:keep-alive, the connection # persists. conn = pool.pool.get() assert conn.sock is not None pool._put_conn(conn) # Another request asking the server to close the connection. This one # should get cleaned up for the next request. 
r = pool.request( "GET", "/keepalive?close=1", retries=0, headers={"Connection": "close"} ) assert r.status == 200 conn = pool.pool.get() assert conn.sock is None pool._put_conn(conn) # Next request r = pool.request("GET", "/keepalive?close=0") def test_post_with_urlencode(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: data = {"banana": "hammock", "lol": "cat"} r = pool.request("POST", "/echo", fields=data, encode_multipart=False) assert r.data.decode("utf-8") == urlencode(data) def test_post_with_multipart(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: data = {"banana": "hammock", "lol": "cat"} r = pool.request("POST", "/echo", fields=data, encode_multipart=True) body = r.data.split(b"\r\n") encoded_data = encode_multipart_formdata(data)[0] expected_body = encoded_data.split(b"\r\n") # TODO: Get rid of extra parsing stuff when you can specify # a custom boundary to encode_multipart_formdata """ We need to loop the return lines because a timestamp is attached from within encode_multipart_formdata. When the server echos back the data, it has the timestamp from when the data was encoded, which is not equivalent to when we run encode_multipart_formdata on the data again. 
""" for i, line in enumerate(body): if line.startswith(b"--"): continue assert body[i] == expected_body[i] def test_post_with_multipart__iter__(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: data = {"hello": "world"} r = pool.request( "POST", "/echo", fields=data, preload_content=False, multipart_boundary="boundary", encode_multipart=True, ) chunks = [chunk for chunk in r] assert chunks == [ b"--boundary\r\n", b'Content-Disposition: form-data; name="hello"\r\n', b"\r\n", b"world\r\n", b"--boundary--\r\n", ] def test_check_gzip(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: r = pool.request( "GET", "/encodingrequest", headers={"accept-encoding": "gzip"} ) assert r.headers.get("content-encoding") == "gzip" assert r.data == b"hello, world!" def test_check_deflate(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: r = pool.request( "GET", "/encodingrequest", headers={"accept-encoding": "deflate"} ) assert r.headers.get("content-encoding") == "deflate" assert r.data == b"hello, world!" 
def test_bad_decode(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: with pytest.raises(DecodeError): pool.request( "GET", "/encodingrequest", headers={"accept-encoding": "garbage-deflate"}, ) with pytest.raises(DecodeError): pool.request( "GET", "/encodingrequest", headers={"accept-encoding": "garbage-gzip"}, ) def test_connection_count(self) -> None: with HTTPConnectionPool(self.host, self.port, maxsize=1) as pool: pool.request("GET", "/") pool.request("GET", "/") pool.request("GET", "/") assert pool.num_connections == 1 assert pool.num_requests == 3 def test_connection_count_bigpool(self) -> None: with HTTPConnectionPool(self.host, self.port, maxsize=16) as http_pool: http_pool.request("GET", "/") http_pool.request("GET", "/") http_pool.request("GET", "/") assert http_pool.num_connections == 1 assert http_pool.num_requests == 3 def test_partial_response(self) -> None: with HTTPConnectionPool(self.host, self.port, maxsize=1) as pool: req_data = {"lol": "cat"} resp_data = urlencode(req_data).encode("utf-8") r = pool.request("GET", "/echo", fields=req_data, preload_content=False) assert r.read(5) == resp_data[:5] assert r.read() == resp_data[5:] def test_lazy_load_twice(self) -> None: # This test is sad and confusing. Need to figure out what's # going on with partial reads and socket reuse. 
with HTTPConnectionPool( self.host, self.port, block=True, maxsize=1, timeout=2 ) as pool: payload_size = 1024 * 2 first_chunk = 512 boundary = "foo" req_data = {"count": "a" * payload_size} resp_data = encode_multipart_formdata(req_data, boundary=boundary)[0] req2_data = {"count": "b" * payload_size} resp2_data = encode_multipart_formdata(req2_data, boundary=boundary)[0] r1 = pool.request( "POST", "/echo", fields=req_data, multipart_boundary=boundary, preload_content=False, ) assert r1.read(first_chunk) == resp_data[:first_chunk] try: r2 = pool.request( "POST", "/echo", fields=req2_data, multipart_boundary=boundary, preload_content=False, pool_timeout=0.001, ) # This branch should generally bail here, but maybe someday it will # work? Perhaps by some sort of magic. Consider it a TODO. assert r2.read(first_chunk) == resp2_data[:first_chunk] assert r1.read() == resp_data[first_chunk:] assert r2.read() == resp2_data[first_chunk:] assert pool.num_requests == 2 except EmptyPoolError: assert r1.read() == resp_data[first_chunk:] assert pool.num_requests == 1 assert pool.num_connections == 1 def test_for_double_release(self) -> None: MAXSIZE = 5 # Check default state with HTTPConnectionPool(self.host, self.port, maxsize=MAXSIZE) as pool: assert pool.num_connections == 0 assert pool.pool is not None assert pool.pool.qsize() == MAXSIZE # Make an empty slot for testing pool.pool.get() assert pool.pool.qsize() == MAXSIZE - 1 # Check state after simple request pool.urlopen("GET", "/") assert pool.pool.qsize() == MAXSIZE - 1 # Check state without release pool.urlopen("GET", "/", preload_content=False) assert pool.pool.qsize() == MAXSIZE - 2 pool.urlopen("GET", "/") assert pool.pool.qsize() == MAXSIZE - 2 # Check state after read pool.urlopen("GET", "/").data assert pool.pool.qsize() == MAXSIZE - 2 pool.urlopen("GET", "/") assert pool.pool.qsize() == MAXSIZE - 2 def test_release_conn_parameter(self) -> None: MAXSIZE = 5 with HTTPConnectionPool(self.host, self.port, 
maxsize=MAXSIZE) as pool: assert pool.pool is not None assert pool.pool.qsize() == MAXSIZE # Make request without releasing connection pool.request("GET", "/", release_conn=False, preload_content=False) assert pool.pool.qsize() == MAXSIZE - 1 def test_dns_error(self) -> None: with HTTPConnectionPool( "thishostdoesnotexist.invalid", self.port, timeout=0.001 ) as pool: with pytest.raises(MaxRetryError): pool.request("GET", "/test", retries=2) @pytest.mark.parametrize("char", [" ", "\r", "\n", "\x00"]) def test_invalid_method_not_allowed(self, char: str) -> None: with pytest.raises(ValueError): with HTTPConnectionPool(self.host, self.port) as pool: pool.request("GET" + char, "/") def test_percent_encode_invalid_target_chars(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: r = pool.request("GET", "/echo_params?q=\r&k=\n \n") assert r.data == b"[('k', '\\n \\n'), ('q', '\\r')]" def test_source_address(self) -> None: for addr, is_ipv6 in VALID_SOURCE_ADDRESSES: if is_ipv6: # TODO enable if HAS_IPV6_AND_DNS when this is fixed: # https://github.com/pgjones/hypercorn/issues/160 warnings.warn("No IPv6 support: skipping.", NoIPv6Warning) continue with HTTPConnectionPool( self.host, self.port, source_address=addr, retries=False ) as pool: r = pool.request("GET", "/source_address") assert r.data == addr[0].encode() @pytest.mark.parametrize( "invalid_source_address, is_ipv6", INVALID_SOURCE_ADDRESSES ) def test_source_address_error( self, invalid_source_address: tuple[str, int], is_ipv6: bool ) -> None: with HTTPConnectionPool( self.host, self.port, source_address=invalid_source_address, retries=False ) as pool: if is_ipv6: with pytest.raises(NameResolutionError): pool.request("GET", f"/source_address?{invalid_source_address}") else: with pytest.raises(NewConnectionError): pool.request("GET", f"/source_address?{invalid_source_address}") def test_stream_keepalive(self) -> None: x = 2 with HTTPConnectionPool(self.host, self.port) as pool: for _ in range(x): 
response = pool.request( "GET", "/chunked", headers={"Connection": "keep-alive"}, preload_content=False, retries=False, ) for chunk in response.stream(): assert chunk == b"123" assert pool.num_connections == 1 assert pool.num_requests == x def test_read_chunked_short_circuit(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: response = pool.request("GET", "/chunked", preload_content=False) response.read() with pytest.raises(StopIteration): next(response.read_chunked()) def test_read_chunked_on_closed_response(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: response = pool.request("GET", "/chunked", preload_content=False) response.close() with pytest.raises(StopIteration): next(response.read_chunked()) def test_chunked_gzip(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: response = pool.request( "GET", "/chunked_gzip", preload_content=False, decode_content=True ) assert b"123" * 4 == response.read() def test_cleanup_on_connection_error(self) -> None: """ Test that connections are recycled to the pool on connection errors where no http response is received. """ poolsize = 3 with HTTPConnectionPool( self.host, self.port, maxsize=poolsize, block=True ) as http: assert http.pool is not None assert http.pool.qsize() == poolsize # force a connection error by supplying a non-existent # url. We won't get a response for this and so the # conn won't be implicitly returned to the pool. 
with pytest.raises(MaxRetryError): http.request( "GET", "/redirect", fields={"target": "/"}, release_conn=False, retries=0, ) r = http.request( "GET", "/redirect", fields={"target": "/"}, release_conn=False, retries=1, ) r.release_conn() # the pool should still contain poolsize elements assert http.pool.qsize() == http.pool.maxsize def test_mixed_case_hostname(self) -> None: with HTTPConnectionPool("LoCaLhOsT", self.port) as pool: response = pool.request("GET", f"http://LoCaLhOsT:{self.port}/") assert response.status == 200 def test_preserves_path_dot_segments(self) -> None: """ConnectionPool preserves dot segments in the URI""" with HTTPConnectionPool(self.host, self.port) as pool: response = pool.request("GET", "/echo_uri/seg0/../seg2") assert response.data == b"/echo_uri/seg0/../seg2?" def test_default_user_agent_header(self) -> None: """ConnectionPool has a default user agent""" default_ua = _get_default_user_agent() custom_ua = "I'm not a web scraper, what are you talking about?" custom_ua2 = "Yet Another User Agent" with HTTPConnectionPool(self.host, self.port) as pool: # Use default user agent if no user agent was specified. r = pool.request("GET", "/headers") request_headers = r.json() assert request_headers.get("User-Agent") == _get_default_user_agent() # Prefer the request user agent over the default. headers = {"UsEr-AGENt": custom_ua} r = pool.request("GET", "/headers", headers=headers) request_headers = r.json() assert request_headers.get("User-Agent") == custom_ua # Do not modify pool headers when using the default user agent. 
pool_headers = {"foo": "bar"} pool.headers = pool_headers r = pool.request("GET", "/headers") request_headers = r.json() assert request_headers.get("User-Agent") == default_ua assert "User-Agent" not in pool_headers pool.headers.update({"User-Agent": custom_ua2}) r = pool.request("GET", "/headers") request_headers = r.json() assert request_headers.get("User-Agent") == custom_ua2 @pytest.mark.parametrize( "headers", [ None, {}, {"User-Agent": "key"}, {"user-agent": "key"}, {b"uSeR-AgEnT": b"key"}, {b"user-agent": "key"}, ], ) @pytest.mark.parametrize("chunked", [True, False]) def test_user_agent_header_not_sent_twice( self, headers: dict[str, str] | None, chunked: bool ) -> None: with HTTPConnectionPool(self.host, self.port) as pool: r = pool.request("GET", "/headers", headers=headers, chunked=chunked) request_headers = r.json() if not headers: assert request_headers["User-Agent"].startswith("python-urllib3/") assert "key" not in request_headers["User-Agent"] else: assert request_headers["User-Agent"] == "key" def test_no_user_agent_header(self) -> None: """ConnectionPool can suppress sending a user agent header""" custom_ua = "I'm not a web scraper, what are you talking about?" with HTTPConnectionPool(self.host, self.port) as pool: # Suppress user agent in the request headers. no_ua_headers = {"User-Agent": SKIP_HEADER} r = pool.request("GET", "/headers", headers=no_ua_headers) request_headers = r.json() assert "User-Agent" not in request_headers assert no_ua_headers["User-Agent"] == SKIP_HEADER # Suppress user agent in the pool headers. pool.headers = no_ua_headers r = pool.request("GET", "/headers") request_headers = r.json() assert "User-Agent" not in request_headers assert no_ua_headers["User-Agent"] == SKIP_HEADER # Request headers override pool headers. 
pool_headers = {"User-Agent": custom_ua} pool.headers = pool_headers r = pool.request("GET", "/headers", headers=no_ua_headers) request_headers = r.json() assert "User-Agent" not in request_headers assert no_ua_headers["User-Agent"] == SKIP_HEADER assert pool_headers.get("User-Agent") == custom_ua @pytest.mark.parametrize("header", ["Content-Length", "content-length"]) @pytest.mark.parametrize("chunked", [True, False]) def test_skip_header_non_supported(self, header: str, chunked: bool) -> None: with HTTPConnectionPool(self.host, self.port) as pool: with pytest.raises( ValueError, match="urllib3.util.SKIP_HEADER only supports 'Accept-Encoding', 'Host', 'User-Agent'", ) as e: pool.request( "GET", "/headers", headers={header: SKIP_HEADER}, chunked=chunked ) # Ensure that the error message stays up to date with 'SKIP_HEADER_SUPPORTED_HEADERS' assert all( ("'" + header.title() + "'") in str(e.value) for header in SKIPPABLE_HEADERS ) @pytest.mark.parametrize("chunked", [True, False]) @pytest.mark.parametrize("pool_request", [True, False]) @pytest.mark.parametrize("header_type", [dict, HTTPHeaderDict]) def test_headers_not_modified_by_request( self, chunked: bool, pool_request: bool, header_type: type[dict[str, str] | HTTPHeaderDict], ) -> None: # Test that the .request*() methods of ConnectionPool and HTTPConnection # don't modify the given 'headers' structure, instead they should # make their own internal copies at request time. 
headers = header_type() headers["key"] = "val" with HTTPConnectionPool(self.host, self.port) as pool: pool.headers = headers if pool_request: pool.request("GET", "/headers", chunked=chunked) else: conn = pool._get_conn() conn.request("GET", "/headers", chunked=chunked) conn.getresponse().close() conn.close() assert pool.headers == {"key": "val"} assert type(pool.headers) is header_type with HTTPConnectionPool(self.host, self.port) as pool: if pool_request: pool.request("GET", "/headers", headers=headers, chunked=chunked) else: conn = pool._get_conn() conn.request("GET", "/headers", headers=headers, chunked=chunked) conn.getresponse().close() conn.close() assert headers == {"key": "val"} def test_request_chunked_is_deprecated( self, ) -> None: with HTTPConnectionPool(self.host, self.port) as pool: conn = pool._get_conn() with pytest.warns(DeprecationWarning) as w: conn.request_chunked("GET", "/headers") # type: ignore[attr-defined] assert len(w) == 1 and str(w[0].message) == ( "HTTPConnection.request_chunked() is deprecated and will be removed in urllib3 v2.1.0. " "Instead use HTTPConnection.request(..., chunked=True)." 
) resp = conn.getresponse() assert resp.status == 200 assert resp.json()["Transfer-Encoding"] == "chunked" conn.close() def test_bytes_header(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: headers = {"User-Agent": "test header"} r = pool.request("GET", "/headers", headers=headers) request_headers = r.json() assert "User-Agent" in request_headers assert request_headers["User-Agent"] == "test header" @pytest.mark.parametrize( "user_agent", ["Schönefeld/1.18.0", "Schönefeld/1.18.0".encode("iso-8859-1")] ) def test_user_agent_non_ascii_user_agent(self, user_agent: str) -> None: with HTTPConnectionPool(self.host, self.port, retries=False) as pool: r = pool.urlopen( "GET", "/headers", headers={"User-Agent": user_agent}, ) request_headers = r.json() assert "User-Agent" in request_headers assert request_headers["User-Agent"] == "Schönefeld/1.18.0" class TestRetry(HypercornDummyServerTestCase): def test_max_retry(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: with pytest.raises(MaxRetryError): pool.request("GET", "/redirect", fields={"target": "/"}, retries=0) def test_disabled_retry(self) -> None: """Disabled retries should disable redirect handling.""" with HTTPConnectionPool(self.host, self.port) as pool: r = pool.request("GET", "/redirect", fields={"target": "/"}, retries=False) assert r.status == 303 r = pool.request( "GET", "/redirect", fields={"target": "/"}, retries=Retry(redirect=False), ) assert r.status == 303 with HTTPConnectionPool( "thishostdoesnotexist.invalid", self.port, timeout=0.001 ) as pool: with pytest.raises(NameResolutionError): pool.request("GET", "/test", retries=False) def test_read_retries(self) -> None: """Should retry for status codes in the forcelist""" with HTTPConnectionPool(self.host, self.port) as pool: retry = Retry(read=1, status_forcelist=[418]) resp = pool.request( "GET", "/successful_retry", headers={"test-name": "test_read_retries"}, retries=retry, ) assert resp.status == 200 def 
test_read_total_retries(self) -> None: """HTTP response w/ status code in the forcelist should be retried""" with HTTPConnectionPool(self.host, self.port) as pool: headers = {"test-name": "test_read_total_retries"} retry = Retry(total=1, status_forcelist=[418]) resp = pool.request( "GET", "/successful_retry", headers=headers, retries=retry ) assert resp.status == 200 def test_retries_wrong_forcelist(self) -> None: """HTTP response w/ status code not in forcelist shouldn't be retried""" with HTTPConnectionPool(self.host, self.port) as pool: retry = Retry(total=1, status_forcelist=[202]) resp = pool.request( "GET", "/successful_retry", headers={"test-name": "test_wrong_forcelist"}, retries=retry, ) assert resp.status == 418 def test_default_method_forcelist_retried(self) -> None: """urllib3 should retry methods in the default method forcelist""" with HTTPConnectionPool(self.host, self.port) as pool: retry = Retry(total=1, status_forcelist=[418]) resp = pool.request( "OPTIONS", "/successful_retry", headers={"test-name": "test_default_forcelist"}, retries=retry, ) assert resp.status == 200 def test_retries_wrong_method_list(self) -> None: """Method not in our allowed list should not be retried, even if code matches""" with HTTPConnectionPool(self.host, self.port) as pool: headers = {"test-name": "test_wrong_allowed_method"} retry = Retry(total=1, status_forcelist=[418], allowed_methods=["POST"]) resp = pool.request( "GET", "/successful_retry", headers=headers, retries=retry ) assert resp.status == 418 def test_read_retries_unsuccessful(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: headers = {"test-name": "test_read_retries_unsuccessful"} resp = pool.request("GET", "/successful_retry", headers=headers, retries=1) assert resp.status == 418 def test_retry_reuse_safe(self) -> None: """It should be possible to reuse a Retry object across requests""" with HTTPConnectionPool(self.host, self.port) as pool: headers = {"test-name": "test_retry_safe"} 
retry = Retry(total=1, status_forcelist=[418]) resp = pool.request( "GET", "/successful_retry", headers=headers, retries=retry ) assert resp.status == 200 with HTTPConnectionPool(self.host, self.port) as pool: resp = pool.request( "GET", "/successful_retry", headers=headers, retries=retry ) assert resp.status == 200 def test_retry_return_in_response(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: headers = {"test-name": "test_retry_return_in_response"} retry = Retry(total=2, status_forcelist=[418]) resp = pool.request( "GET", "/successful_retry", headers=headers, retries=retry ) assert resp.status == 200 assert resp.retries is not None assert resp.retries.total == 1 assert resp.retries.history == ( RequestHistory("GET", "/successful_retry", None, 418, None), ) def test_retry_redirect_history(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: resp = pool.request("GET", "/redirect", fields={"target": "/"}) assert resp.status == 200 assert resp.retries is not None assert resp.retries.history == ( RequestHistory("GET", "/redirect?target=%2F", None, 303, "/"), ) def test_multi_redirect_history(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: r = pool.request( "GET", "/multi_redirect", fields={"redirect_codes": "303,302,200"}, redirect=False, ) assert r.status == 303 assert r.retries is not None assert r.retries.history == tuple() with HTTPConnectionPool(self.host, self.port) as pool: r = pool.request( "GET", "/multi_redirect", retries=10, fields={"redirect_codes": "303,302,301,307,302,200"}, ) assert r.status == 200 assert r.data == b"Done redirecting" expected = [ (303, "/multi_redirect?redirect_codes=302,301,307,302,200"), (302, "/multi_redirect?redirect_codes=301,307,302,200"), (301, "/multi_redirect?redirect_codes=307,302,200"), (307, "/multi_redirect?redirect_codes=302,200"), (302, "/multi_redirect?redirect_codes=200"), ] assert r.retries is not None actual = [ (history.status, history.redirect_location) 
for history in r.retries.history ] assert actual == expected class TestRetryAfter(HypercornDummyServerTestCase): def test_retry_after(self) -> None: # Request twice in a second to get a 429 response. with HTTPConnectionPool(self.host, self.port) as pool: r = pool.request( "GET", "/retry_after", fields={"status": "429 Too Many Requests"}, retries=False, ) r = pool.request( "GET", "/retry_after", fields={"status": "429 Too Many Requests"}, retries=False, ) assert r.status == 429 r = pool.request( "GET", "/retry_after", fields={"status": "429 Too Many Requests"}, retries=True, ) assert r.status == 200 # Request twice in a second to get a 503 response. r = pool.request( "GET", "/retry_after", fields={"status": "503 Service Unavailable"}, retries=False, ) r = pool.request( "GET", "/retry_after", fields={"status": "503 Service Unavailable"}, retries=False, ) assert r.status == 503 r = pool.request( "GET", "/retry_after", fields={"status": "503 Service Unavailable"}, retries=True, ) assert r.status == 200 # Ignore Retry-After header on status which is not defined in # Retry.RETRY_AFTER_STATUS_CODES. 
r = pool.request( "GET", "/retry_after", fields={"status": "418 I'm a teapot"}, retries=True, ) assert r.status == 418 def test_redirect_after(self) -> None: with HTTPConnectionPool(self.host, self.port) as pool: r = pool.request("GET", "/redirect_after", retries=False) assert r.status == 303 t = time.time() r = pool.request("GET", "/redirect_after") assert r.status == 200 delta = time.time() - t assert delta >= 1 t = time.time() timestamp = t + 2 r = pool.request("GET", "/redirect_after?date=" + str(timestamp)) assert r.status == 200 delta = time.time() - t assert delta >= 1 # Retry-After is past t = time.time() timestamp = t - 1 r = pool.request("GET", "/redirect_after?date=" + str(timestamp)) delta = time.time() - t assert r.status == 200 assert delta < 1 class TestFileBodiesOnRetryOrRedirect(HypercornDummyServerTestCase): def test_retries_put_filehandle(self) -> None: """HTTP PUT retry with a file-like object should not timeout""" with HTTPConnectionPool(self.host, self.port, timeout=LONG_TIMEOUT) as pool: retry = Retry(total=3, status_forcelist=[418]) # httplib reads in 8k chunks; use a larger content length content_length = 65535 data = b"A" * content_length uploaded_file = io.BytesIO(data) headers = { "test-name": "test_retries_put_filehandle", "Content-Length": str(content_length), } resp = pool.urlopen( "PUT", "/successful_retry", headers=headers, retries=retry, body=uploaded_file, assert_same_host=False, redirect=False, ) assert resp.status == 200 def test_redirect_put_file(self) -> None: """PUT with file object should work with a redirection response""" with HTTPConnectionPool(self.host, self.port, timeout=LONG_TIMEOUT) as pool: retry = Retry(total=3, status_forcelist=[418]) # httplib reads in 8k chunks; use a larger content length content_length = 65535 data = b"A" * content_length uploaded_file = io.BytesIO(data) headers = { "test-name": "test_redirect_put_file", "Content-Length": str(content_length), } url = "/redirect?target=/echo&status=307" resp = 
pool.urlopen( "PUT", url, headers=headers, retries=retry, body=uploaded_file, assert_same_host=False, redirect=True, ) assert resp.status == 200 assert resp.data == data def test_redirect_with_failed_tell(self) -> None: """Abort request if failed to get a position from tell()""" class BadTellObject(io.BytesIO): def tell(self) -> typing.NoReturn: raise OSError body = BadTellObject(b"the data") url = "/redirect?target=/successful_retry" # httplib uses fileno if Content-Length isn't supplied, # which is unsupported by BytesIO. headers = {"Content-Length": "8"} with HTTPConnectionPool(self.host, self.port, timeout=LONG_TIMEOUT) as pool: with pytest.raises( UnrewindableBodyError, match="Unable to record file position for" ): pool.urlopen("PUT", url, headers=headers, body=body) class TestRetryPoolSize(HypercornDummyServerTestCase): def test_pool_size_retry(self) -> None: retries = Retry(total=1, raise_on_status=False, status_forcelist=[404]) with HTTPConnectionPool( self.host, self.port, maxsize=10, retries=retries, block=True ) as pool: pool.urlopen("GET", "/not_found", preload_content=False) assert pool.num_connections == 1 class TestRedirectPoolSize(HypercornDummyServerTestCase): def test_pool_size_redirect(self) -> None: retries = Retry( total=1, raise_on_status=False, status_forcelist=[404], redirect=True ) with HTTPConnectionPool( self.host, self.port, maxsize=10, retries=retries, block=True ) as pool: pool.urlopen("GET", "/redirect", preload_content=False) assert pool.num_connections == 1 with_dummyserver/test_https.py 0000644 00000130026 15025234504 0012736 0 ustar 00 from __future__ import annotations import contextlib import datetime import os.path import shutil import ssl import tempfile import warnings from pathlib import Path from test import ( LONG_TIMEOUT, SHORT_TIMEOUT, TARPIT_HOST, requires_network, resolvesLocalhostFQDN, ) from test.conftest import ServerConfig from unittest import mock import pytest import trustme import urllib3.util as util import 
urllib3.util.ssl_ from dummyserver.socketserver import ( DEFAULT_CA, DEFAULT_CA_KEY, DEFAULT_CERTS, encrypt_key_pem, ) from dummyserver.testcase import HTTPSHypercornDummyServerTestCase from urllib3 import HTTPSConnectionPool from urllib3.connection import RECENT_DATE, HTTPSConnection, VerifiedHTTPSConnection from urllib3.exceptions import ( ConnectTimeoutError, InsecureRequestWarning, MaxRetryError, ProtocolError, SSLError, SystemTimeWarning, ) from urllib3.util.ssl_match_hostname import CertificateError from urllib3.util.timeout import Timeout from .. import has_alpn TLSv1_CERTS = DEFAULT_CERTS.copy() TLSv1_CERTS["ssl_version"] = getattr(ssl, "PROTOCOL_TLSv1", None) TLSv1_1_CERTS = DEFAULT_CERTS.copy() TLSv1_1_CERTS["ssl_version"] = getattr(ssl, "PROTOCOL_TLSv1_1", None) TLSv1_2_CERTS = DEFAULT_CERTS.copy() TLSv1_2_CERTS["ssl_version"] = getattr(ssl, "PROTOCOL_TLSv1_2", None) TLSv1_3_CERTS = DEFAULT_CERTS.copy() TLSv1_3_CERTS["ssl_version"] = getattr(ssl, "PROTOCOL_TLS", None) CLIENT_INTERMEDIATE_PEM = "client_intermediate.pem" CLIENT_NO_INTERMEDIATE_PEM = "client_no_intermediate.pem" CLIENT_INTERMEDIATE_KEY = "client_intermediate.key" PASSWORD_CLIENT_KEYFILE = "client_password.key" CLIENT_CERT = CLIENT_INTERMEDIATE_PEM class BaseTestHTTPS(HTTPSHypercornDummyServerTestCase): tls_protocol_name: str | None = None def tls_protocol_not_default(self) -> bool: return self.tls_protocol_name in {"TLSv1", "TLSv1.1"} def tls_version(self) -> ssl.TLSVersion: if self.tls_protocol_name is None: return pytest.skip("Skipping base test class") try: from ssl import TLSVersion except ImportError: return pytest.skip("ssl.TLSVersion isn't available") return TLSVersion[self.tls_protocol_name.replace(".", "_")] def ssl_version(self) -> int: if self.tls_protocol_name is None: return pytest.skip("Skipping base test class") if self.tls_protocol_name == "TLSv1.3" and ssl.HAS_TLSv1_3: return ssl.PROTOCOL_TLS_CLIENT if self.tls_protocol_name == "TLSv1.2" and ssl.HAS_TLSv1_2: return 
ssl.PROTOCOL_TLSv1_2 if self.tls_protocol_name == "TLSv1.1" and ssl.HAS_TLSv1_1: return ssl.PROTOCOL_TLSv1_1 if self.tls_protocol_name == "TLSv1" and ssl.HAS_TLSv1: return ssl.PROTOCOL_TLSv1 else: return pytest.skip(f"{self.tls_protocol_name} isn't available") @classmethod def setup_class(cls) -> None: super().setup_class() cls.certs_dir = tempfile.mkdtemp() # Start from existing root CA as we don't want to change the server certificate yet with open(DEFAULT_CA, "rb") as crt, open(DEFAULT_CA_KEY, "rb") as key: root_ca = trustme.CA.from_pem(crt.read(), key.read()) # Generate another CA to test verification failure bad_ca = trustme.CA() cls.bad_ca_path = os.path.join(cls.certs_dir, "ca_bad.pem") bad_ca.cert_pem.write_to_path(cls.bad_ca_path) # client cert chain intermediate_ca = root_ca.create_child_ca() cert = intermediate_ca.issue_cert("example.com") encrypted_key = encrypt_key_pem(cert.private_key_pem, b"letmein") cert.private_key_pem.write_to_path( os.path.join(cls.certs_dir, CLIENT_INTERMEDIATE_KEY) ) encrypted_key.write_to_path( os.path.join(cls.certs_dir, PASSWORD_CLIENT_KEYFILE) ) # Write the client cert and the intermediate CA client_cert = os.path.join(cls.certs_dir, CLIENT_INTERMEDIATE_PEM) cert.cert_chain_pems[0].write_to_path(client_cert) cert.cert_chain_pems[1].write_to_path(client_cert, append=True) # Write only the client cert cert.cert_chain_pems[0].write_to_path( os.path.join(cls.certs_dir, CLIENT_NO_INTERMEDIATE_PEM) ) @classmethod def teardown_class(cls) -> None: super().teardown_class() shutil.rmtree(cls.certs_dir) def test_simple(self, http_version: str) -> None: with HTTPSConnectionPool( self.host, self.port, ca_certs=DEFAULT_CA, ssl_minimum_version=self.tls_version(), ) as https_pool: r = https_pool.request("GET", "/") assert r.status == 200, r.data assert r.headers["server"] == f"hypercorn-{http_version}" assert r.data == b"Dummy server!" 
@resolvesLocalhostFQDN() def test_dotted_fqdn(self) -> None: with HTTPSConnectionPool( self.host + ".", self.port, ca_certs=DEFAULT_CA, ssl_minimum_version=self.tls_version(), ) as pool: r = pool.request("GET", "/") assert r.status == 200, r.data def test_client_intermediate(self) -> None: """Check that certificate chains work well with client certs We generate an intermediate CA from the root CA, and issue a client certificate from that intermediate CA. Since the server only knows about the root CA, we need to send it the certificate *and* the intermediate CA, so that it can check the whole chain. """ with HTTPSConnectionPool( self.host, self.port, key_file=os.path.join(self.certs_dir, CLIENT_INTERMEDIATE_KEY), cert_file=os.path.join(self.certs_dir, CLIENT_INTERMEDIATE_PEM), ca_certs=DEFAULT_CA, ssl_minimum_version=self.tls_version(), ) as https_pool: r = https_pool.request("GET", "/certificate") subject = r.json() assert subject["organizationalUnitName"].startswith("Testing cert") def test_client_no_intermediate(self) -> None: """Check that missing links in certificate chains indeed break The only difference with test_client_intermediate is that we don't send the intermediate CA to the server, only the client cert. 
""" with HTTPSConnectionPool( self.host, self.port, cert_file=os.path.join(self.certs_dir, CLIENT_NO_INTERMEDIATE_PEM), key_file=os.path.join(self.certs_dir, CLIENT_INTERMEDIATE_KEY), ca_certs=DEFAULT_CA, ssl_minimum_version=self.tls_version(), ) as https_pool: with pytest.raises((SSLError, ProtocolError)): https_pool.request("GET", "/certificate", retries=False) def test_client_key_password(self) -> None: with HTTPSConnectionPool( self.host, self.port, ca_certs=DEFAULT_CA, key_file=os.path.join(self.certs_dir, PASSWORD_CLIENT_KEYFILE), cert_file=os.path.join(self.certs_dir, CLIENT_CERT), key_password="letmein", ssl_minimum_version=self.tls_version(), ) as https_pool: r = https_pool.request("GET", "/certificate") subject = r.json() assert subject["organizationalUnitName"].startswith("Testing cert") def test_client_encrypted_key_requires_password(self) -> None: with HTTPSConnectionPool( self.host, self.port, key_file=os.path.join(self.certs_dir, PASSWORD_CLIENT_KEYFILE), cert_file=os.path.join(self.certs_dir, CLIENT_CERT), key_password=None, ssl_minimum_version=self.tls_version(), ) as https_pool: with pytest.raises(MaxRetryError, match="password is required") as e: https_pool.request("GET", "/certificate") assert type(e.value.reason) is SSLError def test_verified(self) -> None: with HTTPSConnectionPool( self.host, self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA, ssl_minimum_version=self.tls_version(), ) as https_pool: with contextlib.closing(https_pool._new_conn()) as conn: assert conn.__class__ == VerifiedHTTPSConnection with warnings.catch_warnings(record=True) as w: r = https_pool.request("GET", "/") assert r.status == 200 assert [str(wm) for wm in w] == [] def test_verified_with_context(self) -> None: ctx = util.ssl_.create_urllib3_context( cert_reqs=ssl.CERT_REQUIRED, ssl_minimum_version=self.tls_version() ) ctx.load_verify_locations(cafile=DEFAULT_CA) with HTTPSConnectionPool(self.host, self.port, ssl_context=ctx) as https_pool: with 
contextlib.closing(https_pool._new_conn()) as conn: assert conn.__class__ == VerifiedHTTPSConnection with mock.patch("warnings.warn") as warn: r = https_pool.request("GET", "/") assert r.status == 200 assert not warn.called, warn.call_args_list def test_context_combines_with_ca_certs(self) -> None: ctx = util.ssl_.create_urllib3_context( cert_reqs=ssl.CERT_REQUIRED, ssl_minimum_version=self.tls_version() ) with HTTPSConnectionPool( self.host, self.port, ca_certs=DEFAULT_CA, ssl_context=ctx ) as https_pool: with contextlib.closing(https_pool._new_conn()) as conn: assert conn.__class__ == VerifiedHTTPSConnection with mock.patch("warnings.warn") as warn: r = https_pool.request("GET", "/") assert r.status == 200 assert not warn.called, warn.call_args_list def test_ca_dir_verified(self, tmp_path: Path) -> None: # OpenSSL looks up certificates by the hash for their name, see c_rehash # TODO infer the bytes using `cryptography.x509.Name.public_bytes`. # https://github.com/pyca/cryptography/pull/3236 shutil.copyfile(DEFAULT_CA, str(tmp_path / "81deb5f7.0")) with HTTPSConnectionPool( self.host, self.port, cert_reqs="CERT_REQUIRED", ca_cert_dir=str(tmp_path), ssl_minimum_version=self.tls_version(), ) as https_pool: with contextlib.closing(https_pool._new_conn()) as conn: assert conn.__class__ == VerifiedHTTPSConnection with warnings.catch_warnings(record=True) as w: r = https_pool.request("GET", "/") assert r.status == 200 assert [str(wm) for wm in w] == [] def test_invalid_common_name(self) -> None: with HTTPSConnectionPool( "127.0.0.1", self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA, ssl_minimum_version=self.tls_version(), ) as https_pool: with pytest.raises(MaxRetryError) as e: https_pool.request("GET", "/", retries=0) assert type(e.value.reason) is SSLError assert "doesn't match" in str( e.value.reason ) or "certificate verify failed" in str(e.value.reason) def test_verified_with_bad_ca_certs(self) -> None: with HTTPSConnectionPool( self.host, self.port, 
cert_reqs="CERT_REQUIRED", ca_certs=self.bad_ca_path, ssl_minimum_version=self.tls_version(), ) as https_pool: with pytest.raises(MaxRetryError) as e: https_pool.request("GET", "/") assert type(e.value.reason) is SSLError assert ( "certificate verify failed" in str(e.value.reason) # PyPy is more specific or "self signed certificate in certificate chain" in str(e.value.reason) ), f"Expected 'certificate verify failed', instead got: {e.value.reason!r}" def test_wrap_socket_failure_resource_leak(self) -> None: with HTTPSConnectionPool( self.host, self.port, cert_reqs="CERT_REQUIRED", ca_certs=self.bad_ca_path, ssl_minimum_version=self.tls_version(), ) as https_pool: with contextlib.closing(https_pool._get_conn()) as conn: with pytest.raises(ssl.SSLError): conn.connect() assert conn.sock is not None # type: ignore[attr-defined] def test_verified_without_ca_certs(self) -> None: # default is cert_reqs=None which is ssl.CERT_NONE with HTTPSConnectionPool( self.host, self.port, cert_reqs="CERT_REQUIRED", ssl_minimum_version=self.tls_version(), ) as https_pool: with pytest.raises(MaxRetryError) as e: https_pool.request("GET", "/") assert type(e.value.reason) is SSLError # there is a different error message depending on whether or # not pyopenssl is injected assert ( "No root certificates specified" in str(e.value.reason) # PyPy is more specific or "self signed certificate in certificate chain" in str(e.value.reason) # PyPy sometimes uses all-caps here or "certificate verify failed" in str(e.value.reason).lower() or "invalid certificate chain" in str(e.value.reason) ), ( "Expected 'No root certificates specified', " "'certificate verify failed', or " "'invalid certificate chain', " "instead got: %r" % e.value.reason ) def test_no_ssl(self) -> None: with HTTPSConnectionPool(self.host, self.port) as pool: pool.ConnectionCls = None # type: ignore[assignment] with pytest.raises(ImportError): pool._new_conn() with pytest.raises(ImportError): pool.request("GET", "/", retries=0) 
def test_unverified_ssl(self) -> None: """Test that bare HTTPSConnection can connect, make requests""" with HTTPSConnectionPool( self.host, self.port, cert_reqs=ssl.CERT_NONE, ssl_minimum_version=self.tls_version(), ) as pool: with mock.patch("warnings.warn") as warn: r = pool.request("GET", "/") assert r.status == 200 assert warn.called # Modern versions of Python, or systems using PyOpenSSL, only emit # the unverified warning. Older systems may also emit other # warnings, which we want to ignore here. calls = warn.call_args_list assert InsecureRequestWarning in [x[0][1] for x in calls] def test_ssl_unverified_with_ca_certs(self) -> None: with HTTPSConnectionPool( self.host, self.port, cert_reqs="CERT_NONE", ca_certs=self.bad_ca_path, ssl_minimum_version=self.tls_version(), ) as pool: with mock.patch("warnings.warn") as warn: r = pool.request("GET", "/") assert r.status == 200 assert warn.called # Modern versions of Python, or systems using PyOpenSSL, only emit # the unverified warning. Older systems may also emit other # warnings, which we want to ignore here. 
calls = warn.call_args_list category = calls[0][0][1] assert category == InsecureRequestWarning def test_assert_hostname_false(self) -> None: with HTTPSConnectionPool( "localhost", self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA, ssl_minimum_version=self.tls_version(), ) as https_pool: https_pool.assert_hostname = False https_pool.request("GET", "/") def test_assert_specific_hostname(self) -> None: with HTTPSConnectionPool( "localhost", self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA, ssl_minimum_version=self.tls_version(), ) as https_pool: https_pool.assert_hostname = "localhost" https_pool.request("GET", "/") def test_server_hostname(self) -> None: with HTTPSConnectionPool( "127.0.0.1", self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA, server_hostname="localhost", ssl_minimum_version=self.tls_version(), ) as https_pool: conn = https_pool._new_conn() conn.request("GET", "/") # Assert the wrapping socket is using the passed-through SNI name. # pyopenssl doesn't let you pull the server_hostname back off the # socket, so only add this assertion if the attribute is there (i.e. # the python ssl module). 
if hasattr(conn.sock, "server_hostname"): # type: ignore[attr-defined] assert conn.sock.server_hostname == "localhost" # type: ignore[attr-defined] conn.getresponse().close() conn.close() def test_assert_fingerprint_md5(self) -> None: with HTTPSConnectionPool( "localhost", self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA, assert_fingerprint=("55:39:BF:70:05:12:43:FA:1F:D1:BF:4E:E8:1B:07:1D"), ssl_minimum_version=self.tls_version(), ) as https_pool: https_pool.request("GET", "/") def test_assert_fingerprint_sha1(self) -> None: with HTTPSConnectionPool( "localhost", self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA, assert_fingerprint=( "72:8B:55:4C:9A:FC:1E:88:A1:1C:AD:1B:B2:E7:CC:3E:DB:C8:F9:8A" ), ssl_minimum_version=self.tls_version(), ) as https_pool: https_pool.request("GET", "/") def test_assert_fingerprint_sha256(self) -> None: with HTTPSConnectionPool( "localhost", self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA, assert_fingerprint=( "E3:59:8E:69:FF:C5:9F:C7:88:87:44:58:22:7F:90:8D:D9:BC:12:C4:90:79:D5:" "DC:A8:5D:4F:60:40:1E:A6:D2" ), ssl_minimum_version=self.tls_version(), ) as https_pool: https_pool.request("GET", "/") def test_assert_invalid_fingerprint(self) -> None: def _test_request(pool: HTTPSConnectionPool) -> SSLError: with pytest.raises(MaxRetryError) as cm: pool.request("GET", "/", retries=0) assert type(cm.value.reason) is SSLError return cm.value.reason with HTTPSConnectionPool( self.host, self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA, ssl_minimum_version=self.tls_version(), ) as https_pool: https_pool.assert_fingerprint = ( "AA:AA:AA:AA:AA:AA:AA:AA:AA:AA:AA:AA:AA:AA:AA:AA:AA:AA:AA:AA" ) e = _test_request(https_pool) expected = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" got = "728b554c9afc1e88a11cad1bb2e7cc3edbc8f98a" assert ( str(e) == f'Fingerprints did not match. 
Expected "{expected}", got "{got}"' ) # Uneven length https_pool.assert_fingerprint = "AA:A" e = _test_request(https_pool) assert "Fingerprint of invalid length:" in str(e) # Invalid length https_pool.assert_fingerprint = "AA" e = _test_request(https_pool) assert "Fingerprint of invalid length:" in str(e) def test_verify_none_and_bad_fingerprint(self) -> None: with HTTPSConnectionPool( "127.0.0.1", self.port, cert_reqs="CERT_NONE", assert_hostname=False, assert_fingerprint=( "AA:8B:55:4C:9A:FC:1E:88:A1:1C:AD:1B:B2:E7:CC:3E:DB:C8:F9:8A" ), ) as https_pool: with pytest.raises(MaxRetryError) as cm: https_pool.request("GET", "/", retries=0) assert type(cm.value.reason) is SSLError def test_verify_none_and_good_fingerprint(self) -> None: with HTTPSConnectionPool( "127.0.0.1", self.port, cert_reqs="CERT_NONE", assert_hostname=False, assert_fingerprint=( "72:8B:55:4C:9A:FC:1E:88:A1:1C:AD:1B:B2:E7:CC:3E:DB:C8:F9:8A" ), ) as https_pool: https_pool.request("GET", "/") def test_good_fingerprint_and_hostname_mismatch(self) -> None: with HTTPSConnectionPool( "127.0.0.1", self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA, assert_fingerprint=( "72:8B:55:4C:9A:FC:1E:88:A1:1C:AD:1B:B2:E7:CC:3E:DB:C8:F9:8A" ), ssl_minimum_version=self.tls_version(), ) as https_pool: https_pool.request("GET", "/") @requires_network() def test_https_timeout(self) -> None: timeout = Timeout(total=None, connect=SHORT_TIMEOUT) with HTTPSConnectionPool( TARPIT_HOST, self.port, timeout=timeout, retries=False, cert_reqs="CERT_REQUIRED", ssl_minimum_version=self.tls_version(), ) as https_pool: with pytest.raises(ConnectTimeoutError): https_pool.request("GET", "/") timeout = Timeout(read=0.01) with HTTPSConnectionPool( self.host, self.port, timeout=timeout, retries=False, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA, assert_fingerprint=( "72:8B:55:4C:9A:FC:1E:88:A1:1C:AD:1B:B2:E7:CC:3E:DB:C8:F9:8A" ), ssl_minimum_version=self.tls_version(), ) as https_pool: # TODO This was removed in 
https://github.com/urllib3/urllib3/pull/703/files # We need to put something back or remove this block. pass timeout = Timeout(total=None) with HTTPSConnectionPool( self.host, self.port, timeout=timeout, cert_reqs="CERT_NONE", ssl_minimum_version=self.tls_version(), ) as https_pool: with pytest.warns(InsecureRequestWarning): https_pool.request("GET", "/") def test_tunnel(self) -> None: """test the _tunnel behavior""" timeout = Timeout(total=None) with HTTPSConnectionPool( self.host, self.port, timeout=timeout, cert_reqs="CERT_NONE", ssl_minimum_version=self.tls_version(), ) as https_pool: with contextlib.closing(https_pool._new_conn()) as conn: conn.set_tunnel(self.host, self.port) with mock.patch.object( conn, "_tunnel", create=True, return_value=None ) as conn_tunnel: with pytest.warns(InsecureRequestWarning): https_pool._make_request(conn, "GET", "/") conn_tunnel.assert_called_once_with() @requires_network() def test_enhanced_timeout(self) -> None: with HTTPSConnectionPool( TARPIT_HOST, self.port, timeout=Timeout(connect=SHORT_TIMEOUT), retries=False, cert_reqs="CERT_REQUIRED", ) as https_pool: with contextlib.closing(https_pool._new_conn()) as conn: with pytest.raises(ConnectTimeoutError): https_pool.request("GET", "/") with pytest.raises(ConnectTimeoutError): https_pool._make_request(conn, "GET", "/") with HTTPSConnectionPool( TARPIT_HOST, self.port, timeout=Timeout(connect=LONG_TIMEOUT), retries=False, cert_reqs="CERT_REQUIRED", ) as https_pool: with pytest.raises(ConnectTimeoutError): https_pool.request("GET", "/", timeout=Timeout(connect=SHORT_TIMEOUT)) with HTTPSConnectionPool( TARPIT_HOST, self.port, timeout=Timeout(total=None), retries=False, cert_reqs="CERT_REQUIRED", ) as https_pool: with contextlib.closing(https_pool._new_conn()) as conn: with pytest.raises(ConnectTimeoutError): https_pool.request( "GET", "/", timeout=Timeout(total=None, connect=SHORT_TIMEOUT) ) def test_enhanced_ssl_connection(self) -> None: fingerprint = 
"72:8B:55:4C:9A:FC:1E:88:A1:1C:AD:1B:B2:E7:CC:3E:DB:C8:F9:8A" with HTTPSConnectionPool( self.host, self.port, cert_reqs="CERT_REQUIRED", ca_certs=DEFAULT_CA, assert_fingerprint=fingerprint, ssl_minimum_version=self.tls_version(), ) as https_pool: r = https_pool.request("GET", "/") assert r.status == 200 def test_ssl_correct_system_time(self) -> None: with HTTPSConnectionPool( self.host, self.port, ca_certs=DEFAULT_CA, ssl_minimum_version=self.tls_version(), ) as https_pool: https_pool.cert_reqs = "CERT_REQUIRED" https_pool.ca_certs = DEFAULT_CA w = self._request_without_resource_warnings("GET", "/") assert [] == w def test_ssl_wrong_system_time(self) -> None: with HTTPSConnectionPool( self.host, self.port, ca_certs=DEFAULT_CA, ssl_minimum_version=self.tls_version(), ) as https_pool: https_pool.cert_reqs = "CERT_REQUIRED" https_pool.ca_certs = DEFAULT_CA with mock.patch("urllib3.connection.datetime") as mock_date: mock_date.date.today.return_value = datetime.date(1970, 1, 1) w = self._request_without_resource_warnings("GET", "/") assert len(w) == 1 warning = w[0] assert SystemTimeWarning == warning.category assert isinstance(warning.message, Warning) assert str(RECENT_DATE) in warning.message.args[0] def _request_without_resource_warnings( self, method: str, url: str ) -> list[warnings.WarningMessage]: with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") with HTTPSConnectionPool( self.host, self.port, ca_certs=DEFAULT_CA, ssl_minimum_version=self.tls_version(), ) as https_pool: https_pool.request(method, url) w = [x for x in w if not isinstance(x.message, ResourceWarning)] return w def test_set_ssl_version_to_tls_version(self) -> None: if self.tls_protocol_name is None: pytest.skip("Skipping base test class") with HTTPSConnectionPool( self.host, self.port, ca_certs=DEFAULT_CA ) as https_pool: https_pool.ssl_version = ssl_version = self.certs["ssl_version"] if ssl_version is getattr(ssl, "PROTOCOL_TLS", object()): cmgr: 
contextlib.AbstractContextManager[ object ] = contextlib.nullcontext() else: cmgr = pytest.warns( DeprecationWarning, match=r"'ssl_version' option is deprecated and will be removed " r"in urllib3 v2\.1\.0\. Instead use 'ssl_minimum_version'", ) with cmgr: r = https_pool.request("GET", "/") assert r.status == 200, r.data def test_set_cert_default_cert_required(self) -> None: conn = VerifiedHTTPSConnection(self.host, self.port) with pytest.warns(DeprecationWarning) as w: conn.set_cert() assert conn.cert_reqs == ssl.CERT_REQUIRED assert len(w) == 1 and str(w[0].message) == ( "HTTPSConnection.set_cert() is deprecated and will be removed in urllib3 v2.1.0. " "Instead provide the parameters to the HTTPSConnection constructor." ) @pytest.mark.parametrize("verify_mode", [ssl.CERT_NONE, ssl.CERT_REQUIRED]) def test_set_cert_inherits_cert_reqs_from_ssl_context( self, verify_mode: int ) -> None: ssl_context = urllib3.util.ssl_.create_urllib3_context(cert_reqs=verify_mode) assert ssl_context.verify_mode == verify_mode conn = HTTPSConnection(self.host, self.port, ssl_context=ssl_context) with pytest.warns(DeprecationWarning) as w: conn.set_cert() assert conn.cert_reqs == verify_mode assert ( conn.ssl_context is not None and conn.ssl_context.verify_mode == verify_mode ) assert len(w) == 1 and str(w[0].message) == ( "HTTPSConnection.set_cert() is deprecated and will be removed in urllib3 v2.1.0. " "Instead provide the parameters to the HTTPSConnection constructor." 
) def test_tls_protocol_name_of_socket(self) -> None: if self.tls_protocol_name is None: pytest.skip("Skipping base test class") with HTTPSConnectionPool( self.host, self.port, ca_certs=DEFAULT_CA, ssl_minimum_version=self.tls_version(), ssl_maximum_version=self.tls_version(), ) as https_pool: with contextlib.closing(https_pool._get_conn()) as conn: conn.connect() if not hasattr(conn.sock, "version"): # type: ignore[attr-defined] pytest.skip("SSLSocket.version() not available") assert conn.sock.version() == self.tls_protocol_name # type: ignore[attr-defined] def test_ssl_version_is_deprecated(self) -> None: if self.tls_protocol_name is None: pytest.skip("Skipping base test class") if self.ssl_version() == ssl.PROTOCOL_TLS_CLIENT: pytest.skip( "Skipping because ssl_version=ssl.PROTOCOL_TLS_CLIENT is not deprecated" ) with HTTPSConnectionPool( self.host, self.port, ca_certs=DEFAULT_CA, ssl_version=self.ssl_version() ) as https_pool: with contextlib.closing(https_pool._get_conn()) as conn: with pytest.warns(DeprecationWarning) as w: conn.connect() assert len(w) >= 1 assert any(x.category == DeprecationWarning for x in w) assert any( str(x.message) == ( "'ssl_version' option is deprecated and will be removed in " "urllib3 v2.1.0. 
Instead use 'ssl_minimum_version'" ) for x in w ) @pytest.mark.parametrize( "ssl_version", [None, ssl.PROTOCOL_TLS, ssl.PROTOCOL_TLS_CLIENT] ) def test_ssl_version_with_protocol_tls_or_client_not_deprecated( self, ssl_version: int | None ) -> None: if self.tls_protocol_name is None: pytest.skip("Skipping base test class") if self.tls_protocol_not_default(): pytest.skip( f"Skipping because '{self.tls_protocol_name}' isn't set by default" ) with HTTPSConnectionPool( self.host, self.port, ca_certs=DEFAULT_CA, ssl_version=ssl_version ) as https_pool: with contextlib.closing(https_pool._get_conn()) as conn: with warnings.catch_warnings(record=True) as w: conn.connect() assert [str(wm) for wm in w if wm.category != ResourceWarning] == [] def test_no_tls_version_deprecation_with_ssl_context(self) -> None: if self.tls_protocol_name is None: pytest.skip("Skipping base test class") ctx = util.ssl_.create_urllib3_context(ssl_minimum_version=self.tls_version()) with HTTPSConnectionPool( self.host, self.port, ca_certs=DEFAULT_CA, ssl_context=ctx, ) as https_pool: with contextlib.closing(https_pool._get_conn()) as conn: with warnings.catch_warnings(record=True) as w: conn.connect() assert [str(wm) for wm in w if wm.category != ResourceWarning] == [] def test_tls_version_maximum_and_minimum(self) -> None: if self.tls_protocol_name is None: pytest.skip("Skipping base test class") from ssl import TLSVersion min_max_versions = [ (self.tls_version(), self.tls_version()), (TLSVersion.MINIMUM_SUPPORTED, self.tls_version()), (TLSVersion.MINIMUM_SUPPORTED, TLSVersion.MAXIMUM_SUPPORTED), ] for minimum_version, maximum_version in min_max_versions: with HTTPSConnectionPool( self.host, self.port, ca_certs=DEFAULT_CA, ssl_minimum_version=minimum_version, ssl_maximum_version=maximum_version, ) as https_pool: conn = https_pool._get_conn() try: conn.connect() if maximum_version == TLSVersion.MAXIMUM_SUPPORTED: # A higher protocol than tls_protocol_name could be negotiated assert 
conn.sock.version() >= self.tls_protocol_name # type: ignore[attr-defined] else: assert conn.sock.version() == self.tls_protocol_name # type: ignore[attr-defined] finally: conn.close() def test_sslkeylogfile( self, tmp_path: Path, monkeypatch: pytest.MonkeyPatch ) -> None: if not hasattr(util.SSLContext, "keylog_filename"): pytest.skip("requires OpenSSL 1.1.1+") keylog_file = tmp_path / "keylogfile.txt" monkeypatch.setenv("SSLKEYLOGFILE", str(keylog_file)) with HTTPSConnectionPool( self.host, self.port, ca_certs=DEFAULT_CA, ssl_minimum_version=self.tls_version(), ) as https_pool: r = https_pool.request("GET", "/") assert r.status == 200, r.data assert keylog_file.is_file(), "keylogfile '%s' should exist" % str( keylog_file ) assert keylog_file.read_text().startswith( "# TLS secrets log file" ), "keylogfile '%s' should start with '# TLS secrets log file'" % str( keylog_file ) @pytest.mark.parametrize("sslkeylogfile", [None, ""]) def test_sslkeylogfile_empty( self, monkeypatch: pytest.MonkeyPatch, sslkeylogfile: str | None ) -> None: # Assert that an HTTPS connection doesn't error out when given # no SSLKEYLOGFILE or an empty value (ie 'SSLKEYLOGFILE=') if sslkeylogfile is not None: monkeypatch.setenv("SSLKEYLOGFILE", sslkeylogfile) else: monkeypatch.delenv("SSLKEYLOGFILE", raising=False) with HTTPSConnectionPool( self.host, self.port, ca_certs=DEFAULT_CA, ssl_minimum_version=self.tls_version(), ) as pool: r = pool.request("GET", "/") assert r.status == 200, r.data def test_alpn_default(self) -> None: """Default ALPN protocols are sent by default.""" if not has_alpn() or not has_alpn(ssl.SSLContext): pytest.skip("ALPN-support not available") with HTTPSConnectionPool( self.host, self.port, ca_certs=DEFAULT_CA, ssl_minimum_version=self.tls_version(), ) as pool: r = pool.request("GET", "/alpn_protocol", retries=0) assert r.status == 200 assert r.data.decode("utf-8") == util.ALPN_PROTOCOLS[0] def test_default_ssl_context_ssl_min_max_versions(self) -> None: ctx = 
urllib3.util.ssl_.create_urllib3_context() assert ctx.minimum_version == ssl.TLSVersion.TLSv1_2 # urllib3 sets a default maximum version only when it is # injected with PyOpenSSL SSL-support. # Otherwise, the default maximum version is set by Python's # `ssl.SSLContext`. The value respects OpenSSL configuration and # can be different from `ssl.TLSVersion.MAXIMUM_SUPPORTED`. # https://github.com/urllib3/urllib3/issues/2477#issuecomment-1151452150 if util.IS_PYOPENSSL: expected_maximum_version = ssl.TLSVersion.MAXIMUM_SUPPORTED else: expected_maximum_version = ssl.SSLContext( ssl.PROTOCOL_TLS_CLIENT ).maximum_version assert ctx.maximum_version == expected_maximum_version def test_ssl_context_ssl_version_uses_ssl_min_max_versions(self) -> None: if self.ssl_version() == ssl.PROTOCOL_TLS_CLIENT: pytest.skip( "Skipping because ssl_version=ssl.PROTOCOL_TLS_CLIENT is not deprecated" ) with pytest.warns( DeprecationWarning, match=r"'ssl_version' option is deprecated and will be removed in " r"urllib3 v2\.1\.0\. 
Instead use 'ssl_minimum_version'", ): ctx = urllib3.util.ssl_.create_urllib3_context( ssl_version=self.ssl_version() ) assert ctx.minimum_version == self.tls_version() assert ctx.maximum_version == self.tls_version() @pytest.mark.usefixtures("requires_tlsv1") class TestHTTPS_TLSv1(BaseTestHTTPS): tls_protocol_name = "TLSv1" certs = TLSv1_CERTS @pytest.mark.usefixtures("requires_tlsv1_1") class TestHTTPS_TLSv1_1(BaseTestHTTPS): tls_protocol_name = "TLSv1.1" certs = TLSv1_1_CERTS @pytest.mark.usefixtures("requires_tlsv1_2") class TestHTTPS_TLSv1_2(BaseTestHTTPS): tls_protocol_name = "TLSv1.2" certs = TLSv1_2_CERTS @pytest.mark.usefixtures("requires_tlsv1_3") class TestHTTPS_TLSv1_3(BaseTestHTTPS): tls_protocol_name = "TLSv1.3" certs = TLSv1_3_CERTS class TestHTTPS_Hostname: def test_can_validate_san(self, san_server: ServerConfig) -> None: """Ensure that urllib3 can validate SANs with IP addresses in them.""" with HTTPSConnectionPool( san_server.host, san_server.port, cert_reqs="CERT_REQUIRED", ca_certs=san_server.ca_certs, ) as https_pool: r = https_pool.request("GET", "/") assert r.status == 200 def test_common_name_without_san_fails(self, no_san_server: ServerConfig) -> None: with HTTPSConnectionPool( no_san_server.host, no_san_server.port, cert_reqs="CERT_REQUIRED", ca_certs=no_san_server.ca_certs, ) as https_pool: with pytest.raises( MaxRetryError, ) as e: https_pool.request("GET", "/") assert "mismatch, certificate is not valid" in str( e.value ) or "no appropriate subjectAltName" in str(e.value) def test_common_name_without_san_with_different_common_name( self, no_san_server_with_different_commmon_name: ServerConfig ) -> None: ctx = urllib3.util.ssl_.create_urllib3_context() try: ctx.hostname_checks_common_name = True except AttributeError: pytest.skip("Couldn't set 'SSLContext.hostname_checks_common_name'") with HTTPSConnectionPool( no_san_server_with_different_commmon_name.host, no_san_server_with_different_commmon_name.port, cert_reqs="CERT_REQUIRED", 
ca_certs=no_san_server_with_different_commmon_name.ca_certs, ssl_context=ctx, ) as https_pool: with pytest.raises(MaxRetryError) as e: https_pool.request("GET", "/") assert "mismatch, certificate is not valid for 'localhost'" in str( e.value ) or "hostname 'localhost' doesn't match 'example.com'" in str(e.value) @pytest.mark.parametrize("use_assert_hostname", [True, False]) def test_hostname_checks_common_name_respected( self, no_san_server: ServerConfig, use_assert_hostname: bool ) -> None: ctx = urllib3.util.ssl_.create_urllib3_context() if not hasattr(ctx, "hostname_checks_common_name"): pytest.skip("Test requires 'SSLContext.hostname_checks_common_name'") ctx.load_verify_locations(no_san_server.ca_certs) try: ctx.hostname_checks_common_name = True except AttributeError: pytest.skip("Couldn't set 'SSLContext.hostname_checks_common_name'") err: MaxRetryError | None try: with HTTPSConnectionPool( no_san_server.host, no_san_server.port, cert_reqs="CERT_REQUIRED", ssl_context=ctx, assert_hostname=no_san_server.host if use_assert_hostname else None, ) as https_pool: https_pool.request("GET", "/") except MaxRetryError as e: err = e else: err = None # commonName is only valid for DNS names, not IP addresses. if no_san_server.host == "localhost": assert err is None # IP addresses should fail for commonName. 
else: assert err is not None assert type(err.reason) is SSLError assert isinstance( err.reason.args[0], (ssl.SSLCertVerificationError, CertificateError) ) def test_assert_hostname_invalid_san( self, no_localhost_san_server: ServerConfig ) -> None: """Ensure SAN errors are not raised while assert_hostname is false""" with HTTPSConnectionPool( no_localhost_san_server.host, no_localhost_san_server.port, cert_reqs="CERT_REQUIRED", ca_certs=no_localhost_san_server.ca_certs, assert_hostname=False, ) as https_pool: https_pool.request("GET", "/") def test_assert_hostname_invalid_cn( self, no_san_server_with_different_commmon_name: ServerConfig ) -> None: """Ensure CN errors are not raised while assert_hostname is false""" with HTTPSConnectionPool( no_san_server_with_different_commmon_name.host, no_san_server_with_different_commmon_name.port, cert_reqs="CERT_REQUIRED", ca_certs=no_san_server_with_different_commmon_name.ca_certs, assert_hostname=False, ) as https_pool: https_pool.request("GET", "/") class TestHTTPS_IPV4SAN: def test_can_validate_ip_san(self, ipv4_san_server: ServerConfig) -> None: """Ensure that urllib3 can validate SANs with IP addresses in them.""" with HTTPSConnectionPool( ipv4_san_server.host, ipv4_san_server.port, cert_reqs="CERT_REQUIRED", ca_certs=ipv4_san_server.ca_certs, ) as https_pool: r = https_pool.request("GET", "/") assert r.status == 200 class TestHTTPS_IPV6SAN: @pytest.mark.parametrize("host", ["::1", "[::1]"]) def test_can_validate_ipv6_san( self, ipv6_san_server: ServerConfig, host: str, http_version: str ) -> None: """Ensure that urllib3 can validate SANs with IPv6 addresses in them.""" with HTTPSConnectionPool( host, ipv6_san_server.port, cert_reqs="CERT_REQUIRED", ca_certs=ipv6_san_server.ca_certs, ) as https_pool: r = https_pool.request("GET", "/") assert r.status == 200 assert r.headers["server"] == f"hypercorn-{http_version}" with_dummyserver/test_proxy_poolmanager.py 0000644 00000112760 15025234504 0015346 0 ustar 00 from __future__ 
import annotations import binascii import contextlib import hashlib import ipaddress import os.path import pathlib import shutil import socket import ssl import tempfile from test import LONG_TIMEOUT, SHORT_TIMEOUT, resolvesLocalhostFQDN, withPyOpenSSL from test.conftest import ServerConfig import pytest import trustme import urllib3.exceptions from dummyserver.socketserver import DEFAULT_CA, HAS_IPV6, get_unreachable_address from dummyserver.testcase import ( HypercornDummyProxyTestCase, IPv6HypercornDummyProxyTestCase, ) from urllib3 import HTTPResponse from urllib3._collections import HTTPHeaderDict from urllib3.connection import VerifiedHTTPSConnection from urllib3.connectionpool import connection_from_url from urllib3.exceptions import ( ConnectTimeoutError, InsecureRequestWarning, MaxRetryError, ProxyError, ProxySchemeUnknown, ProxySchemeUnsupported, ReadTimeoutError, SSLError, ) from urllib3.poolmanager import ProxyManager, proxy_from_url from urllib3.util.ssl_ import create_urllib3_context from urllib3.util.timeout import Timeout from .. 
import TARPIT_HOST, requires_network def assert_is_verified(pm: ProxyManager, *, proxy: bool, target: bool) -> None: pool = list(pm.pools._container.values())[-1] # retrieve last pool entry connection = ( pool.pool.queue[-1] if pool.pool is not None else None ) # retrieve last connection entry assert connection.proxy_is_verified is proxy assert connection.is_verified is target class TestHTTPProxyManager(HypercornDummyProxyTestCase): @classmethod def setup_class(cls) -> None: super().setup_class() cls.http_url = f"http://{cls.http_host}:{int(cls.http_port)}" cls.http_url_alt = f"http://{cls.http_host_alt}:{int(cls.http_port)}" cls.https_url = f"https://{cls.https_host}:{int(cls.https_port)}" cls.https_url_alt = f"https://{cls.https_host_alt}:{int(cls.https_port)}" cls.https_url_fqdn = f"https://{cls.https_host}.:{int(cls.https_port)}" cls.proxy_url = f"http://{cls.proxy_host}:{int(cls.proxy_port)}" cls.https_proxy_url = f"https://{cls.proxy_host}:{int(cls.https_proxy_port)}" # Generate another CA to test verification failure cls.certs_dir = tempfile.mkdtemp() bad_ca = trustme.CA() cls.bad_ca_path = os.path.join(cls.certs_dir, "ca_bad.pem") bad_ca.cert_pem.write_to_path(cls.bad_ca_path) @classmethod def teardown_class(cls) -> None: super().teardown_class() shutil.rmtree(cls.certs_dir) def test_basic_proxy(self) -> None: with proxy_from_url(self.proxy_url, ca_certs=DEFAULT_CA) as http: r = http.request("GET", f"{self.http_url}/") assert r.status == 200 r = http.request("GET", f"{self.https_url}/") assert r.status == 200 def test_https_proxy(self) -> None: with proxy_from_url(self.https_proxy_url, ca_certs=DEFAULT_CA) as https: r = https.request("GET", f"{self.https_url}/") assert r.status == 200 r = https.request("GET", f"{self.http_url}/") assert r.status == 200 def test_is_verified_http_proxy_to_http_target(self) -> None: with proxy_from_url(self.proxy_url, ca_certs=DEFAULT_CA) as http: r = http.request("GET", f"{self.http_url}/") assert r.status == 200 
assert_is_verified(http, proxy=False, target=False) def test_is_verified_http_proxy_to_https_target(self) -> None: with proxy_from_url(self.proxy_url, ca_certs=DEFAULT_CA) as http: r = http.request("GET", f"{self.https_url}/") assert r.status == 200 assert_is_verified(http, proxy=False, target=True) def test_is_verified_https_proxy_to_http_target(self) -> None: with proxy_from_url(self.https_proxy_url, ca_certs=DEFAULT_CA) as https: r = https.request("GET", f"{self.http_url}/") assert r.status == 200 assert_is_verified(https, proxy=True, target=False) def test_is_verified_https_proxy_to_https_target(self) -> None: with proxy_from_url(self.https_proxy_url, ca_certs=DEFAULT_CA) as https: r = https.request("GET", f"{self.https_url}/") assert r.status == 200 assert_is_verified(https, proxy=True, target=True) def test_http_and_https_kwarg_ca_cert_data_proxy(self) -> None: with open(DEFAULT_CA) as pem_file: pem_file_data = pem_file.read() with proxy_from_url(self.https_proxy_url, ca_cert_data=pem_file_data) as https: r = https.request("GET", f"{self.https_url}/") assert r.status == 200 r = https.request("GET", f"{self.http_url}/") assert r.status == 200 def test_https_proxy_with_proxy_ssl_context(self) -> None: proxy_ssl_context = create_urllib3_context() proxy_ssl_context.load_verify_locations(DEFAULT_CA) with proxy_from_url( self.https_proxy_url, proxy_ssl_context=proxy_ssl_context, ca_certs=DEFAULT_CA, ) as https: r = https.request("GET", f"{self.https_url}/") assert r.status == 200 r = https.request("GET", f"{self.http_url}/") assert r.status == 200 @withPyOpenSSL def test_https_proxy_pyopenssl_not_supported(self) -> None: with proxy_from_url(self.https_proxy_url, ca_certs=DEFAULT_CA) as https: r = https.request("GET", f"{self.http_url}/") assert r.status == 200 with pytest.raises( ProxySchemeUnsupported, match="isn't available on non-native SSLContext" ): https.request("GET", f"{self.https_url}/") def test_https_proxy_forwarding_for_https(self) -> None: with 
proxy_from_url( self.https_proxy_url, ca_certs=DEFAULT_CA, use_forwarding_for_https=True, ) as https: r = https.request("GET", f"{self.http_url}/") assert r.status == 200 r = https.request("GET", f"{self.https_url}/") assert r.status == 200 def test_nagle_proxy(self) -> None: """Test that proxy connections do not have TCP_NODELAY turned on""" with ProxyManager(self.proxy_url) as http: hc2 = http.connection_from_host(self.http_host, self.http_port) conn = hc2._get_conn() try: hc2._make_request(conn, "GET", f"{self.http_url}/") tcp_nodelay_setting = conn.sock.getsockopt( # type: ignore[attr-defined] socket.IPPROTO_TCP, socket.TCP_NODELAY ) assert tcp_nodelay_setting == 0, ( "Expected TCP_NODELAY for proxies to be set " "to zero, instead was %s" % tcp_nodelay_setting ) finally: conn.close() @pytest.mark.parametrize("proxy_scheme", ["http", "https"]) @pytest.mark.parametrize("target_scheme", ["http", "https"]) def test_proxy_conn_fail_from_dns( self, proxy_scheme: str, target_scheme: str ) -> None: host, port = get_unreachable_address() with proxy_from_url( f"{proxy_scheme}://{host}:{port}/", retries=1, timeout=LONG_TIMEOUT ) as http: if target_scheme == "https": target_url = self.https_url else: target_url = self.http_url with pytest.raises(MaxRetryError) as e: http.request("GET", f"{target_url}/") assert isinstance(e.value.reason, ProxyError) assert isinstance( e.value.reason.original_error, urllib3.exceptions.NameResolutionError ) def test_oldapi(self) -> None: with ProxyManager( connection_from_url(self.proxy_url), ca_certs=DEFAULT_CA # type: ignore[arg-type] ) as http: r = http.request("GET", f"{self.http_url}/") assert r.status == 200 r = http.request("GET", f"{self.https_url}/") assert r.status == 200 @resolvesLocalhostFQDN() def test_proxy_https_fqdn(self) -> None: with proxy_from_url(self.proxy_url, ca_certs=DEFAULT_CA) as http: r = http.request("GET", f"{self.https_url_fqdn}/") assert r.status == 200 def test_proxy_verified(self) -> None: with proxy_from_url( 
self.proxy_url, cert_reqs="REQUIRED", ca_certs=self.bad_ca_path ) as http: with http._new_pool( "https", self.https_host, self.https_port ) as https_pool: with pytest.raises(MaxRetryError) as e: https_pool.request("GET", "/", retries=0) assert isinstance(e.value.reason, SSLError) assert ( "certificate verify failed" in str(e.value.reason) # PyPy is more specific or "self signed certificate in certificate chain" in str(e.value.reason) ), f"Expected 'certificate verify failed', instead got: {e.value.reason!r}" http = proxy_from_url( self.proxy_url, cert_reqs="REQUIRED", ca_certs=DEFAULT_CA ) with http._new_pool( "https", self.https_host, self.https_port ) as https_pool2: with contextlib.closing(https_pool._new_conn()) as conn: assert conn.__class__ == VerifiedHTTPSConnection https_pool2.request( "GET", "/" ) # Should succeed without exceptions. http = proxy_from_url( self.proxy_url, cert_reqs="REQUIRED", ca_certs=DEFAULT_CA ) with http._new_pool( "https", "127.0.0.1", self.https_port ) as https_fail_pool: with pytest.raises( MaxRetryError, match="doesn't match|IP address mismatch" ) as e: https_fail_pool.request("GET", "/", retries=0) assert isinstance(e.value.reason, SSLError) def test_redirect(self) -> None: with proxy_from_url(self.proxy_url) as http: r = http.request( "GET", f"{self.http_url}/redirect", fields={"target": f"{self.http_url}/"}, redirect=False, ) assert r.status == 303 r = http.request( "GET", f"{self.http_url}/redirect", fields={"target": f"{self.http_url}/"}, ) assert r.status == 200 assert r.data == b"Dummy server!" 
def test_cross_host_redirect(self) -> None: with proxy_from_url(self.proxy_url) as http: cross_host_location = f"{self.http_url_alt}/echo?a=b" with pytest.raises(MaxRetryError): http.request( "GET", f"{self.http_url}/redirect", fields={"target": cross_host_location}, retries=0, ) r = http.request( "GET", f"{self.http_url}/redirect", fields={"target": f"{self.http_url_alt}/echo?a=b"}, retries=1, ) assert isinstance(r, HTTPResponse) assert r._pool is not None assert r._pool.host != self.http_host_alt def test_cross_protocol_redirect(self) -> None: with proxy_from_url(self.proxy_url, ca_certs=DEFAULT_CA) as http: cross_protocol_location = f"{self.https_url}/echo?a=b" with pytest.raises(MaxRetryError): http.request( "GET", f"{self.http_url}/redirect", fields={"target": cross_protocol_location}, retries=0, ) r = http.request( "GET", f"{self.http_url}/redirect", fields={"target": f"{self.https_url}/echo?a=b"}, retries=1, ) assert isinstance(r, HTTPResponse) assert r._pool is not None assert r._pool.host == self.https_host def test_headers(self) -> None: with proxy_from_url( self.proxy_url, headers={"Foo": "bar"}, proxy_headers={"Hickory": "dickory"}, ca_certs=DEFAULT_CA, ) as http: r = http.request_encode_url("GET", f"{self.http_url}/headers") returned_headers = r.json() assert returned_headers.get("Foo") == "bar" assert returned_headers.get("Hickory") == "dickory" assert returned_headers.get("Host") == f"{self.http_host}:{self.http_port}" r = http.request_encode_url("GET", f"{self.http_url_alt}/headers") returned_headers = r.json() assert returned_headers.get("Foo") == "bar" assert returned_headers.get("Hickory") == "dickory" assert ( returned_headers.get("Host") == f"{self.http_host_alt}:{self.http_port}" ) r = http.request_encode_url("GET", f"{self.https_url}/headers") returned_headers = r.json() assert returned_headers.get("Foo") == "bar" assert returned_headers.get("Hickory") is None assert ( returned_headers.get("Host") == f"{self.https_host}:{self.https_port}" ) r 
= http.request_encode_body("POST", f"{self.http_url}/headers") returned_headers = r.json() assert returned_headers.get("Foo") == "bar" assert returned_headers.get("Hickory") == "dickory" assert returned_headers.get("Host") == f"{self.http_host}:{self.http_port}" r = http.request_encode_url( "GET", f"{self.http_url}/headers", headers={"Baz": "quux"} ) returned_headers = r.json() assert returned_headers.get("Foo") is None assert returned_headers.get("Baz") == "quux" assert returned_headers.get("Hickory") == "dickory" assert returned_headers.get("Host") == f"{self.http_host}:{self.http_port}" r = http.request_encode_url( "GET", f"{self.https_url}/headers", headers={"Baz": "quux"} ) returned_headers = r.json() assert returned_headers.get("Foo") is None assert returned_headers.get("Baz") == "quux" assert returned_headers.get("Hickory") is None assert ( returned_headers.get("Host") == f"{self.https_host}:{self.https_port}" ) r = http.request_encode_body( "GET", f"{self.http_url}/headers", headers={"Baz": "quux"} ) returned_headers = r.json() assert returned_headers.get("Foo") is None assert returned_headers.get("Baz") == "quux" assert returned_headers.get("Hickory") == "dickory" assert returned_headers.get("Host") == f"{self.http_host}:{self.http_port}" r = http.request_encode_body( "GET", f"{self.https_url}/headers", headers={"Baz": "quux"} ) returned_headers = r.json() assert returned_headers.get("Foo") is None assert returned_headers.get("Baz") == "quux" assert returned_headers.get("Hickory") is None assert ( returned_headers.get("Host") == f"{self.https_host}:{self.https_port}" ) def test_https_headers(self) -> None: with proxy_from_url( self.https_proxy_url, headers={"Foo": "bar"}, proxy_headers={"Hickory": "dickory"}, ca_certs=DEFAULT_CA, ) as http: r = http.request_encode_url("GET", f"{self.http_url}/headers") returned_headers = r.json() assert returned_headers.get("Foo") == "bar" assert returned_headers.get("Hickory") == "dickory" assert 
returned_headers.get("Host") == f"{self.http_host}:{self.http_port}" r = http.request_encode_url("GET", f"{self.http_url_alt}/headers") returned_headers = r.json() assert returned_headers.get("Foo") == "bar" assert returned_headers.get("Hickory") == "dickory" assert ( returned_headers.get("Host") == f"{self.http_host_alt}:{self.http_port}" ) r = http.request_encode_body( "GET", f"{self.https_url}/headers", headers={"Baz": "quux"} ) returned_headers = r.json() assert returned_headers.get("Foo") is None assert returned_headers.get("Baz") == "quux" assert returned_headers.get("Hickory") is None assert ( returned_headers.get("Host") == f"{self.https_host}:{self.https_port}" ) def test_https_headers_forwarding_for_https(self) -> None: with proxy_from_url( self.https_proxy_url, headers={"Foo": "bar"}, proxy_headers={"Hickory": "dickory"}, ca_certs=DEFAULT_CA, use_forwarding_for_https=True, ) as http: r = http.request_encode_url("GET", f"{self.https_url}/headers") returned_headers = r.json() assert returned_headers.get("Foo") == "bar" assert returned_headers.get("Hickory") == "dickory" assert ( returned_headers.get("Host") == f"{self.https_host}:{self.https_port}" ) def test_headerdict(self) -> None: default_headers = HTTPHeaderDict(a="b") proxy_headers = HTTPHeaderDict() proxy_headers.add("foo", "bar") with proxy_from_url( self.proxy_url, headers=default_headers, proxy_headers=proxy_headers ) as http: request_headers = HTTPHeaderDict(baz="quux") r = http.request("GET", f"{self.http_url}/headers", headers=request_headers) returned_headers = r.json() assert returned_headers.get("Foo") == "bar" assert returned_headers.get("Baz") == "quux" def test_proxy_pooling(self) -> None: with proxy_from_url(self.proxy_url, cert_reqs="NONE") as http: for x in range(2): http.urlopen("GET", self.http_url) assert len(http.pools) == 1 for x in range(2): http.urlopen("GET", self.http_url_alt) assert len(http.pools) == 1 for x in range(2): with pytest.warns(InsecureRequestWarning): 
http.urlopen("GET", self.https_url) assert len(http.pools) == 2 for x in range(2): with pytest.warns(InsecureRequestWarning): http.urlopen("GET", self.https_url_alt) assert len(http.pools) == 3 def test_proxy_pooling_ext(self) -> None: with proxy_from_url(self.proxy_url) as http: hc1 = http.connection_from_url(self.http_url) hc2 = http.connection_from_host(self.http_host, self.http_port) hc3 = http.connection_from_url(self.http_url_alt) hc4 = http.connection_from_host(self.http_host_alt, self.http_port) assert hc1 == hc2 assert hc2 == hc3 assert hc3 == hc4 sc1 = http.connection_from_url(self.https_url) sc2 = http.connection_from_host( self.https_host, self.https_port, scheme="https" ) sc3 = http.connection_from_url(self.https_url_alt) sc4 = http.connection_from_host( self.https_host_alt, self.https_port, scheme="https" ) assert sc1 == sc2 assert sc2 != sc3 assert sc3 == sc4 @requires_network() @pytest.mark.parametrize( ["proxy_scheme", "target_scheme", "use_forwarding_for_https"], [ ("http", "http", False), ("https", "http", False), # 'use_forwarding_for_https' is only valid for HTTPS+HTTPS. ("https", "https", True), ], ) def test_forwarding_proxy_request_timeout( self, proxy_scheme: str, target_scheme: str, use_forwarding_for_https: bool ) -> None: proxy_url = self.https_proxy_url if proxy_scheme == "https" else self.proxy_url target_url = f"{target_scheme}://{TARPIT_HOST}" with proxy_from_url( proxy_url, ca_certs=DEFAULT_CA, use_forwarding_for_https=use_forwarding_for_https, ) as proxy: with pytest.raises(MaxRetryError) as e: timeout = Timeout(connect=LONG_TIMEOUT, read=SHORT_TIMEOUT) proxy.request("GET", target_url, timeout=timeout) # We sent the request to the proxy but didn't get any response # so we're not sure if that's being caused by the proxy or the # target so we put the blame on the target. 
assert isinstance(e.value.reason, ReadTimeoutError) # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning # see https://github.com/python/cpython/issues/103472 @pytest.mark.filterwarnings("default::ResourceWarning") @requires_network() @pytest.mark.parametrize( ["proxy_scheme", "target_scheme"], [("http", "https"), ("https", "https")] ) def test_tunneling_proxy_request_timeout( self, proxy_scheme: str, target_scheme: str ) -> None: proxy_url = self.https_proxy_url if proxy_scheme == "https" else self.proxy_url target_url = f"{target_scheme}://{TARPIT_HOST}" with proxy_from_url( proxy_url, ca_certs=DEFAULT_CA, ) as proxy: with pytest.raises(MaxRetryError) as e: timeout = Timeout(connect=LONG_TIMEOUT, read=SHORT_TIMEOUT) proxy.request("GET", target_url, timeout=timeout) assert isinstance(e.value.reason, ReadTimeoutError) # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning # see https://github.com/python/cpython/issues/103472 @pytest.mark.filterwarnings("default::ResourceWarning") @requires_network() @pytest.mark.parametrize( ["proxy_scheme", "target_scheme", "use_forwarding_for_https"], [ ("http", "http", False), ("https", "http", False), # 'use_forwarding_for_https' is only valid for HTTPS+HTTPS. 
("https", "https", True), ], ) def test_forwarding_proxy_connect_timeout( self, proxy_scheme: str, target_scheme: str, use_forwarding_for_https: bool ) -> None: proxy_url = f"{proxy_scheme}://{TARPIT_HOST}" target_url = self.https_url if target_scheme == "https" else self.http_url with proxy_from_url( proxy_url, ca_certs=DEFAULT_CA, timeout=SHORT_TIMEOUT, use_forwarding_for_https=use_forwarding_for_https, ) as proxy: with pytest.raises(MaxRetryError) as e: proxy.request("GET", target_url) assert isinstance(e.value.reason, ProxyError) assert isinstance(e.value.reason.original_error, ConnectTimeoutError) # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning # see https://github.com/python/cpython/issues/103472 @pytest.mark.filterwarnings("default::ResourceWarning") @requires_network() @pytest.mark.parametrize( ["proxy_scheme", "target_scheme"], [("http", "https"), ("https", "https")] ) def test_tunneling_proxy_connect_timeout( self, proxy_scheme: str, target_scheme: str ) -> None: proxy_url = f"{proxy_scheme}://{TARPIT_HOST}" target_url = self.https_url if target_scheme == "https" else self.http_url with proxy_from_url( proxy_url, ca_certs=DEFAULT_CA, timeout=SHORT_TIMEOUT ) as proxy: with pytest.raises(MaxRetryError) as e: proxy.request("GET", target_url) assert isinstance(e.value.reason, ProxyError) assert isinstance(e.value.reason.original_error, ConnectTimeoutError) # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning # see https://github.com/python/cpython/issues/103472 @pytest.mark.filterwarnings("default::ResourceWarning") @requires_network() @pytest.mark.parametrize( ["target_scheme", "use_forwarding_for_https"], [ ("http", False), ("https", False), ("https", True), ], ) def test_https_proxy_tls_error( self, target_scheme: str, use_forwarding_for_https: str ) -> None: target_url = self.https_url if target_scheme == "https" else self.http_url proxy_ctx = ssl.create_default_context() with proxy_from_url( self.https_proxy_url, 
proxy_ssl_context=proxy_ctx, use_forwarding_for_https=use_forwarding_for_https, ) as proxy: with pytest.raises(MaxRetryError) as e: proxy.request("GET", target_url) assert isinstance(e.value.reason, ProxyError) assert isinstance(e.value.reason.original_error, SSLError) # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning # see https://github.com/python/cpython/issues/103472 @pytest.mark.filterwarnings("default::ResourceWarning") @requires_network() @pytest.mark.parametrize( ["proxy_scheme", "use_forwarding_for_https"], [ ("http", False), ("https", False), ("https", True), ], ) def test_proxy_https_target_tls_error( self, proxy_scheme: str, use_forwarding_for_https: str ) -> None: if proxy_scheme == "https" and use_forwarding_for_https: pytest.skip("Test is expected to fail due to urllib3/urllib3#2577") proxy_url = self.https_proxy_url if proxy_scheme == "https" else self.proxy_url proxy_ctx = ssl.create_default_context() proxy_ctx.load_verify_locations(DEFAULT_CA) ctx = ssl.create_default_context() with proxy_from_url( proxy_url, proxy_ssl_context=proxy_ctx, ssl_context=ctx, use_forwarding_for_https=use_forwarding_for_https, ) as proxy: with pytest.raises(MaxRetryError) as e: proxy.request("GET", self.https_url) assert isinstance(e.value.reason, SSLError) # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning # see https://github.com/python/cpython/issues/103472 @pytest.mark.filterwarnings("default::ResourceWarning") def test_scheme_host_case_insensitive(self) -> None: """Assert that upper-case schemes and hosts are normalized.""" with proxy_from_url(self.proxy_url.upper(), ca_certs=DEFAULT_CA) as http: r = http.request("GET", f"{self.http_url.upper()}/") assert r.status == 200 r = http.request("GET", f"{self.https_url.upper()}/") assert r.status == 200 @pytest.mark.parametrize( "url, error_msg", [ ( "127.0.0.1", "Proxy URL had no scheme, should start with http:// or https://", ), ( "localhost:8080", "Proxy URL had no scheme, 
should start with http:// or https://", ), ( "ftp://google.com", "Proxy URL had unsupported scheme ftp, should use http:// or https://", ), ], ) # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning # see https://github.com/python/cpython/issues/103472 @pytest.mark.filterwarnings("default::ResourceWarning") def test_invalid_schema(self, url: str, error_msg: str) -> None: with pytest.raises(ProxySchemeUnknown, match=error_msg): proxy_from_url(url) @pytest.mark.skipif(not HAS_IPV6, reason="Only runs on IPv6 systems") class TestIPv6HTTPProxyManager(IPv6HypercornDummyProxyTestCase): @classmethod def setup_class(cls) -> None: super().setup_class() cls.http_url = f"http://{cls.http_host}:{int(cls.http_port)}" cls.http_url_alt = f"http://{cls.http_host_alt}:{int(cls.http_port)}" cls.https_url = f"https://{cls.https_host}:{int(cls.https_port)}" cls.https_url_alt = f"https://{cls.https_host_alt}:{int(cls.https_port)}" cls.proxy_url = f"http://[{cls.proxy_host}]:{int(cls.proxy_port)}" # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning # see https://github.com/python/cpython/issues/103472 @pytest.mark.filterwarnings("default::ResourceWarning") def test_basic_ipv6_proxy(self) -> None: with proxy_from_url(self.proxy_url, ca_certs=DEFAULT_CA) as http: r = http.request("GET", f"{self.http_url}/") assert r.status == 200 r = http.request("GET", f"{self.https_url}/") assert r.status == 200 class TestHTTPSProxyVerification: @staticmethod def _get_proxy_fingerprint_md5(ca_path: str) -> str: proxy_pem_path = pathlib.Path(ca_path).parent / "proxy.pem" proxy_der = ssl.PEM_cert_to_DER_cert(proxy_pem_path.read_text()) proxy_hashed = hashlib.md5(proxy_der).digest() fingerprint = binascii.hexlify(proxy_hashed).decode("ascii") return fingerprint @staticmethod def _get_certificate_formatted_proxy_host(host: str) -> str: try: addr = ipaddress.ip_address(host) except ValueError: return host if addr.version != 6: return host # Transform ipv6 like '::1' to 
0:0:0:0:0:0:0:1 via '0000:0000:0000:0000:0000:0000:0000:0001' return addr.exploded.replace("0000", "0").replace("000", "") # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning # see https://github.com/python/cpython/issues/103472 @pytest.mark.filterwarnings("default::ResourceWarning") def test_https_proxy_assert_fingerprint_md5( self, no_san_proxy_with_server: tuple[ServerConfig, ServerConfig] ) -> None: proxy, server = no_san_proxy_with_server proxy_url = f"https://{proxy.host}:{proxy.port}" destination_url = f"https://{server.host}:{server.port}" proxy_fingerprint = self._get_proxy_fingerprint_md5(proxy.ca_certs) with proxy_from_url( proxy_url, ca_certs=proxy.ca_certs, proxy_assert_fingerprint=proxy_fingerprint, ) as https: https.request("GET", destination_url) # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning # see https://github.com/python/cpython/issues/103472 @pytest.mark.filterwarnings("default::ResourceWarning") def test_https_proxy_assert_fingerprint_md5_non_matching( self, no_san_proxy_with_server: tuple[ServerConfig, ServerConfig] ) -> None: proxy, server = no_san_proxy_with_server proxy_url = f"https://{proxy.host}:{proxy.port}" destination_url = f"https://{server.host}:{server.port}" proxy_fingerprint = self._get_proxy_fingerprint_md5(proxy.ca_certs) new_char = "b" if proxy_fingerprint[5] == "a" else "a" proxy_fingerprint = proxy_fingerprint[:5] + new_char + proxy_fingerprint[6:] with proxy_from_url( proxy_url, ca_certs=proxy.ca_certs, proxy_assert_fingerprint=proxy_fingerprint, ) as https: with pytest.raises(MaxRetryError) as e: https.request("GET", destination_url) assert "Fingerprints did not match" in str(e) # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning # see https://github.com/python/cpython/issues/103472 @pytest.mark.filterwarnings("default::ResourceWarning") def test_https_proxy_assert_hostname( self, san_proxy_with_server: tuple[ServerConfig, ServerConfig] ) -> None: proxy, server 
= san_proxy_with_server destination_url = f"https://{server.host}:{server.port}" with proxy_from_url( proxy.base_url, ca_certs=proxy.ca_certs, proxy_assert_hostname=proxy.host ) as https: https.request("GET", destination_url) # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning # see https://github.com/python/cpython/issues/103472 @pytest.mark.filterwarnings("default::ResourceWarning") def test_https_proxy_assert_hostname_non_matching( self, san_proxy_with_server: tuple[ServerConfig, ServerConfig] ) -> None: proxy, server = san_proxy_with_server destination_url = f"https://{server.host}:{server.port}" proxy_hostname = "example.com" with proxy_from_url( proxy.base_url, ca_certs=proxy.ca_certs, proxy_assert_hostname=proxy_hostname, ) as https: with pytest.raises(MaxRetryError) as e: https.request("GET", destination_url) proxy_host = self._get_certificate_formatted_proxy_host(proxy.host) msg = f"hostname \\'{proxy_hostname}\\' doesn\\'t match \\'{proxy_host}\\'" assert msg in str(e) # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning # see https://github.com/python/cpython/issues/103472 @pytest.mark.filterwarnings("default::ResourceWarning") def test_https_proxy_hostname_verification( self, no_localhost_san_server: ServerConfig ) -> None: bad_server = no_localhost_san_server bad_proxy_url = f"https://{bad_server.host}:{bad_server.port}" # An exception will be raised before we contact the destination domain. 
test_url = "testing.com" with proxy_from_url(bad_proxy_url, ca_certs=bad_server.ca_certs) as https: with pytest.raises(MaxRetryError) as e: https.request("GET", "http://%s/" % test_url) assert isinstance(e.value.reason, ProxyError) ssl_error = e.value.reason.original_error assert isinstance(ssl_error, SSLError) assert "hostname 'localhost' doesn't match" in str( ssl_error ) or "Hostname mismatch" in str(ssl_error) with pytest.raises(MaxRetryError) as e: https.request("GET", "https://%s/" % test_url) assert isinstance(e.value.reason, ProxyError) ssl_error = e.value.reason.original_error assert isinstance(ssl_error, SSLError) assert "hostname 'localhost' doesn't match" in str( ssl_error ) or "Hostname mismatch" in str(ssl_error) # stdlib http.client.HTTPConnection._tunnel() causes a ResourceWarning # see https://github.com/python/cpython/issues/103472 @pytest.mark.filterwarnings("default::ResourceWarning") def test_https_proxy_ipv4_san( self, ipv4_san_proxy_with_server: tuple[ServerConfig, ServerConfig] ) -> None: proxy, server = ipv4_san_proxy_with_server proxy_url = f"https://{proxy.host}:{proxy.port}" destination_url = f"https://{server.host}:{server.port}" with proxy_from_url(proxy_url, ca_certs=proxy.ca_certs) as https: r = https.request("GET", destination_url) assert r.status == 200 def test_https_proxy_ipv6_san( self, ipv6_san_proxy_with_server: tuple[ServerConfig, ServerConfig] ) -> None: proxy, server = ipv6_san_proxy_with_server proxy_url = f"https://[{proxy.host}]:{proxy.port}" destination_url = f"https://{server.host}:{server.port}" with proxy_from_url(proxy_url, ca_certs=proxy.ca_certs) as https: r = https.request("GET", destination_url) assert r.status == 200 @pytest.mark.parametrize("target_scheme", ["http", "https"]) def test_https_proxy_no_san( self, no_san_proxy_with_server: tuple[ServerConfig, ServerConfig], target_scheme: str, ) -> None: proxy, server = no_san_proxy_with_server proxy_url = f"https://{proxy.host}:{proxy.port}" destination_url = 
f"{target_scheme}://{server.host}:{server.port}" with proxy_from_url(proxy_url, ca_certs=proxy.ca_certs) as https: with pytest.raises(MaxRetryError) as e: https.request("GET", destination_url) assert isinstance(e.value.reason, ProxyError) ssl_error = e.value.reason.original_error assert isinstance(ssl_error, SSLError) assert "no appropriate subjectAltName fields were found" in str( ssl_error ) or "Hostname mismatch, certificate is not valid for 'localhost'" in str( ssl_error ) def test_https_proxy_no_san_hostname_checks_common_name( self, no_san_proxy_with_server: tuple[ServerConfig, ServerConfig] ) -> None: proxy, server = no_san_proxy_with_server proxy_url = f"https://{proxy.host}:{proxy.port}" destination_url = f"https://{server.host}:{server.port}" proxy_ctx = urllib3.util.ssl_.create_urllib3_context() try: proxy_ctx.hostname_checks_common_name = True # PyPy doesn't like us setting 'hostname_checks_common_name' # but also has it enabled by default so we need to handle that. except AttributeError: pass if getattr(proxy_ctx, "hostname_checks_common_name", False) is not True: pytest.skip("Test requires 'SSLContext.hostname_checks_common_name=True'") with proxy_from_url( proxy_url, ca_certs=proxy.ca_certs, proxy_ssl_context=proxy_ctx ) as https: https.request("GET", destination_url) with_dummyserver/test_chunked_transfer.py 0000644 00000024666 15025234504 0015135 0 ustar 00 from __future__ import annotations import socket import pytest from dummyserver.testcase import ( ConnectionMarker, SocketDummyServerTestCase, consume_socket, ) from urllib3 import HTTPConnectionPool from urllib3.util import SKIP_HEADER from urllib3.util.retry import Retry class TestChunkedTransfer(SocketDummyServerTestCase): def start_chunked_handler(self) -> None: self.buffer = b"" def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] while not self.buffer.endswith(b"\r\n0\r\n\r\n"): self.buffer += sock.recv(65536) sock.send( b"HTTP/1.1 200 OK\r\n" b"Content-type: 
text/plain\r\n" b"Content-Length: 0\r\n" b"\r\n" ) sock.close() self._start_server(socket_handler) @pytest.mark.parametrize( "chunks", [ ["foo", "bar", "", "bazzzzzzzzzzzzzzzzzzzzzz"], [b"foo", b"bar", b"", b"bazzzzzzzzzzzzzzzzzzzzzz"], ], ) def test_chunks(self, chunks: list[bytes | str]) -> None: self.start_chunked_handler() with HTTPConnectionPool(self.host, self.port, retries=False) as pool: pool.urlopen("GET", "/", body=chunks, headers=dict(DNT="1"), chunked=True) # type: ignore[arg-type] assert b"Transfer-Encoding" in self.buffer body = self.buffer.split(b"\r\n\r\n", 1)[1] lines = body.split(b"\r\n") # Empty chunks should have been skipped, as this could not be distinguished # from terminating the transmission for i, chunk in enumerate( [c.decode() if isinstance(c, bytes) else c for c in chunks if c] ): assert lines[i * 2] == hex(len(chunk))[2:].encode("utf-8") assert lines[i * 2 + 1] == chunk.encode("utf-8") def _test_body(self, data: bytes | str | None) -> None: self.start_chunked_handler() with HTTPConnectionPool(self.host, self.port, retries=False) as pool: pool.urlopen("GET", "/", data, chunked=True) header, body = self.buffer.split(b"\r\n\r\n", 1) assert b"Transfer-Encoding: chunked" in header.split(b"\r\n") if data: bdata = data if isinstance(data, bytes) else data.encode("utf-8") assert b"\r\n" + bdata + b"\r\n" in body assert body.endswith(b"\r\n0\r\n\r\n") len_str = body.split(b"\r\n", 1)[0] stated_len = int(len_str, 16) assert stated_len == len(bdata) else: assert body == b"0\r\n\r\n" def test_bytestring_body(self) -> None: self._test_body(b"thisshouldbeonechunk\r\nasdf") def test_unicode_body(self) -> None: self._test_body("thisshouldbeonechunk\r\näöüß") def test_empty_body(self) -> None: self._test_body(None) def test_empty_string_body(self) -> None: self._test_body("") def test_empty_iterable_body(self) -> None: self._test_body(None) def _get_header_lines(self, prefix: bytes) -> list[bytes]: header_block = self.buffer.split(b"\r\n\r\n", 
1)[0].lower() header_lines = header_block.split(b"\r\n")[1:] return [x for x in header_lines if x.startswith(prefix)] def test_removes_duplicate_host_header(self) -> None: self.start_chunked_handler() chunks = [b"foo", b"bar", b"", b"bazzzzzzzzzzzzzzzzzzzzzz"] with HTTPConnectionPool(self.host, self.port, retries=False) as pool: pool.urlopen( "GET", "/", body=chunks, headers={"Host": "test.org"}, chunked=True ) host_headers = self._get_header_lines(b"host") assert len(host_headers) == 1 def test_provides_default_host_header(self) -> None: self.start_chunked_handler() chunks = [b"foo", b"bar", b"", b"bazzzzzzzzzzzzzzzzzzzzzz"] with HTTPConnectionPool(self.host, self.port, retries=False) as pool: pool.urlopen("GET", "/", body=chunks, chunked=True) host_headers = self._get_header_lines(b"host") assert len(host_headers) == 1 def test_provides_default_user_agent_header(self) -> None: self.start_chunked_handler() chunks = [b"foo", b"bar", b"", b"bazzzzzzzzzzzzzzzzzzzzzz"] with HTTPConnectionPool(self.host, self.port, retries=False) as pool: pool.urlopen("GET", "/", body=chunks, chunked=True) ua_headers = self._get_header_lines(b"user-agent") assert len(ua_headers) == 1 def test_preserve_user_agent_header(self) -> None: self.start_chunked_handler() chunks = [b"foo", b"bar", b"", b"bazzzzzzzzzzzzzzzzzzzzzz"] with HTTPConnectionPool(self.host, self.port, retries=False) as pool: pool.urlopen( "GET", "/", body=chunks, headers={"user-Agent": "test-agent"}, chunked=True, ) ua_headers = self._get_header_lines(b"user-agent") # Validate that there is only one User-Agent header. assert len(ua_headers) == 1 # Validate that the existing User-Agent header is the one that was # provided. 
assert ua_headers[0] == b"user-agent: test-agent" def test_remove_user_agent_header(self) -> None: self.start_chunked_handler() chunks = [b"foo", b"bar", b"", b"bazzzzzzzzzzzzzzzzzzzzzz"] with HTTPConnectionPool(self.host, self.port, retries=False) as pool: pool.urlopen( "GET", "/", body=chunks, headers={"User-Agent": SKIP_HEADER}, chunked=True, ) ua_headers = self._get_header_lines(b"user-agent") assert len(ua_headers) == 0 def test_provides_default_transfer_encoding_header(self) -> None: self.start_chunked_handler() chunks = [b"foo", b"bar", b"", b"bazzzzzzzzzzzzzzzzzzzzzz"] with HTTPConnectionPool(self.host, self.port, retries=False) as pool: pool.urlopen("GET", "/", body=chunks, chunked=True) te_headers = self._get_header_lines(b"transfer-encoding") assert len(te_headers) == 1 def test_preserve_transfer_encoding_header(self) -> None: self.start_chunked_handler() chunks = [b"foo", b"bar", b"", b"bazzzzzzzzzzzzzzzzzzzzzz"] with HTTPConnectionPool(self.host, self.port, retries=False) as pool: pool.urlopen( "GET", "/", body=chunks, headers={"transfer-Encoding": "test-transfer-encoding"}, chunked=True, ) te_headers = self._get_header_lines(b"transfer-encoding") # Validate that there is only one Transfer-Encoding header. assert len(te_headers) == 1 # Validate that the existing Transfer-Encoding header is the one that # was provided. 
assert te_headers[0] == b"transfer-encoding: test-transfer-encoding" def test_preserve_chunked_on_retry_after(self) -> None: self.chunked_requests = 0 self.socks: list[socket.socket] = [] def socket_handler(listener: socket.socket) -> None: for _ in range(2): sock = listener.accept()[0] self.socks.append(sock) request = consume_socket(sock) if b"Transfer-Encoding: chunked" in request.split(b"\r\n"): self.chunked_requests += 1 sock.send( b"HTTP/1.1 429 Too Many Requests\r\n" b"Content-Type: text/plain\r\n" b"Retry-After: 1\r\n" b"Content-Length: 0\r\n" b"Connection: close\r\n" b"\r\n" ) self._start_server(socket_handler) with HTTPConnectionPool(self.host, self.port) as pool: retries = Retry(total=1) pool.urlopen("GET", "/", chunked=True, retries=retries) for sock in self.socks: sock.close() assert self.chunked_requests == 2 def test_preserve_chunked_on_redirect( self, monkeypatch: pytest.MonkeyPatch ) -> None: self.chunked_requests = 0 def socket_handler(listener: socket.socket) -> None: for i in range(2): sock = listener.accept()[0] request = ConnectionMarker.consume_request(sock) if b"Transfer-Encoding: chunked" in request.split(b"\r\n"): self.chunked_requests += 1 if i == 0: sock.sendall( b"HTTP/1.1 301 Moved Permanently\r\n" b"Location: /redirect\r\n\r\n" ) else: sock.sendall(b"HTTP/1.1 200 OK\r\n\r\n") sock.close() self._start_server(socket_handler) with ConnectionMarker.mark(monkeypatch): with HTTPConnectionPool(self.host, self.port) as pool: retries = Retry(redirect=1) pool.urlopen( "GET", "/", chunked=True, preload_content=False, retries=retries ) assert self.chunked_requests == 2 def test_preserve_chunked_on_broken_connection( self, monkeypatch: pytest.MonkeyPatch ) -> None: self.chunked_requests = 0 def socket_handler(listener: socket.socket) -> None: for i in range(2): sock = listener.accept()[0] request = ConnectionMarker.consume_request(sock) if b"Transfer-Encoding: chunked" in request.split(b"\r\n"): self.chunked_requests += 1 if i == 0: # Bad HTTP 
version will trigger a connection close sock.sendall(b"HTTP/0.5 200 OK\r\n\r\n") else: sock.sendall(b"HTTP/1.1 200 OK\r\n\r\n") sock.close() self._start_server(socket_handler) with ConnectionMarker.mark(monkeypatch): with HTTPConnectionPool(self.host, self.port) as pool: retries = Retry(read=1) pool.urlopen( "GET", "/", chunked=True, preload_content=False, retries=retries ) assert self.chunked_requests == 2 with_dummyserver/test_socketlevel.py 0000644 00000256155 15025234504 0014130 0 ustar 00 # TODO: Break this module up into pieces. Maybe group by functionality tested # rather than the socket level-ness of it. from __future__ import annotations import contextlib import errno import io import os import os.path import select import shutil import socket import ssl import tempfile import threading import typing import zlib from collections import OrderedDict from pathlib import Path from test import LONG_TIMEOUT, SHORT_TIMEOUT, notWindows, resolvesLocalhostFQDN from threading import Event from unittest import mock import pytest import trustme from dummyserver.socketserver import ( DEFAULT_CA, DEFAULT_CERTS, encrypt_key_pem, get_unreachable_address, ) from dummyserver.testcase import SocketDummyServerTestCase, consume_socket from urllib3 import HTTPConnectionPool, HTTPSConnectionPool, ProxyManager, util from urllib3._collections import HTTPHeaderDict from urllib3.connection import HTTPConnection, _get_default_user_agent from urllib3.connectionpool import _url_from_pool from urllib3.exceptions import ( InsecureRequestWarning, MaxRetryError, ProtocolError, ProxyError, ReadTimeoutError, SSLError, ) from urllib3.poolmanager import proxy_from_url from urllib3.util import ssl_, ssl_wrap_socket from urllib3.util.retry import Retry from urllib3.util.timeout import Timeout from .. 
import LogRecorder, has_alpn if typing.TYPE_CHECKING: from _typeshed import StrOrBytesPath else: StrOrBytesPath = object class TestCookies(SocketDummyServerTestCase): def test_multi_setcookie(self) -> None: def multicookie_response_handler(listener: socket.socket) -> None: sock = listener.accept()[0] buf = b"" while not buf.endswith(b"\r\n\r\n"): buf += sock.recv(65536) sock.send( b"HTTP/1.1 200 OK\r\n" b"Set-Cookie: foo=1\r\n" b"Set-Cookie: bar=1\r\n" b"\r\n" ) sock.close() self._start_server(multicookie_response_handler) with HTTPConnectionPool(self.host, self.port) as pool: r = pool.request("GET", "/", retries=0) assert r.headers == {"set-cookie": "foo=1, bar=1"} assert r.headers.getlist("set-cookie") == ["foo=1", "bar=1"] class TestSNI(SocketDummyServerTestCase): def test_hostname_in_first_request_packet(self) -> None: done_receiving = Event() self.buf = b"" def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] self.buf = sock.recv(65536) # We only accept one packet done_receiving.set() # let the test know it can proceed sock.close() self._start_server(socket_handler) with HTTPSConnectionPool(self.host, self.port) as pool: try: pool.request("GET", "/", retries=0) except MaxRetryError: # We are violating the protocol pass successful = done_receiving.wait(LONG_TIMEOUT) assert successful, "Timed out waiting for connection accept" assert ( self.host.encode("ascii") in self.buf ), "missing hostname in SSL handshake" class TestALPN(SocketDummyServerTestCase): def test_alpn_protocol_in_first_request_packet(self) -> None: if not has_alpn(): pytest.skip("ALPN-support not available") done_receiving = Event() self.buf = b"" def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] self.buf = sock.recv(65536) # We only accept one packet done_receiving.set() # let the test know it can proceed sock.close() self._start_server(socket_handler) with HTTPSConnectionPool(self.host, self.port) as pool: try: pool.request("GET", "/", 
retries=0) except MaxRetryError: # We are violating the protocol pass successful = done_receiving.wait(LONG_TIMEOUT) assert successful, "Timed out waiting for connection accept" for protocol in util.ALPN_PROTOCOLS: assert ( protocol.encode("ascii") in self.buf ), "missing ALPN protocol in SSL handshake" def original_ssl_wrap_socket( sock: socket.socket, keyfile: StrOrBytesPath | None = None, certfile: StrOrBytesPath | None = None, server_side: bool = False, cert_reqs: ssl.VerifyMode = ssl.CERT_NONE, ssl_version: int = ssl.PROTOCOL_TLS, ca_certs: str | None = None, do_handshake_on_connect: bool = True, suppress_ragged_eofs: bool = True, ciphers: str | None = None, ) -> ssl.SSLSocket: if server_side and not certfile: raise ValueError("certfile must be specified for server-side operations") if keyfile and not certfile: raise ValueError("certfile must be specified") context = ssl.SSLContext(ssl_version) context.verify_mode = cert_reqs if ca_certs: context.load_verify_locations(ca_certs) if certfile: context.load_cert_chain(certfile, keyfile) if ciphers: context.set_ciphers(ciphers) return context.wrap_socket( sock=sock, server_side=server_side, do_handshake_on_connect=do_handshake_on_connect, suppress_ragged_eofs=suppress_ragged_eofs, ) class TestClientCerts(SocketDummyServerTestCase): """ Tests for client certificate support. 
""" @classmethod def setup_class(cls) -> None: cls.tmpdir = tempfile.mkdtemp() ca = trustme.CA() cert = ca.issue_cert("localhost") encrypted_key = encrypt_key_pem(cert.private_key_pem, b"letmein") cls.ca_path = os.path.join(cls.tmpdir, "ca.pem") cls.cert_combined_path = os.path.join(cls.tmpdir, "server.combined.pem") cls.cert_path = os.path.join(cls.tmpdir, "server.pem") cls.key_path = os.path.join(cls.tmpdir, "key.pem") cls.password_key_path = os.path.join(cls.tmpdir, "password_key.pem") ca.cert_pem.write_to_path(cls.ca_path) cert.private_key_and_cert_chain_pem.write_to_path(cls.cert_combined_path) cert.cert_chain_pems[0].write_to_path(cls.cert_path) cert.private_key_pem.write_to_path(cls.key_path) encrypted_key.write_to_path(cls.password_key_path) @classmethod def teardown_class(cls) -> None: shutil.rmtree(cls.tmpdir) def _wrap_in_ssl(self, sock: socket.socket) -> ssl.SSLSocket: """ Given a single socket, wraps it in TLS. """ return original_ssl_wrap_socket( sock, ssl_version=ssl.PROTOCOL_SSLv23, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_path, certfile=self.cert_path, keyfile=self.key_path, server_side=True, ) def test_client_certs_two_files(self) -> None: """ Having a client cert in a separate file to its associated key works properly. """ done_receiving = Event() client_certs = [] def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] sock = self._wrap_in_ssl(sock) client_certs.append(sock.getpeercert()) data = b"" while not data.endswith(b"\r\n\r\n"): data += sock.recv(8192) sock.sendall( b"HTTP/1.1 200 OK\r\n" b"Server: testsocket\r\n" b"Connection: close\r\n" b"Content-Length: 6\r\n" b"\r\n" b"Valid!" 
) done_receiving.wait(5) sock.close() self._start_server(socket_handler) with HTTPSConnectionPool( self.host, self.port, cert_file=self.cert_path, key_file=self.key_path, cert_reqs="REQUIRED", ca_certs=self.ca_path, ) as pool: pool.request("GET", "/", retries=0) done_receiving.set() assert len(client_certs) == 1 def test_client_certs_one_file(self) -> None: """ Having a client cert and its associated private key in just one file works properly. """ done_receiving = Event() client_certs = [] def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] sock = self._wrap_in_ssl(sock) client_certs.append(sock.getpeercert()) data = b"" while not data.endswith(b"\r\n\r\n"): data += sock.recv(8192) sock.sendall( b"HTTP/1.1 200 OK\r\n" b"Server: testsocket\r\n" b"Connection: close\r\n" b"Content-Length: 6\r\n" b"\r\n" b"Valid!" ) done_receiving.wait(5) sock.close() self._start_server(socket_handler) with HTTPSConnectionPool( self.host, self.port, cert_file=self.cert_combined_path, cert_reqs="REQUIRED", ca_certs=self.ca_path, ) as pool: pool.request("GET", "/", retries=0) done_receiving.set() assert len(client_certs) == 1 def test_missing_client_certs_raises_error(self) -> None: """ Having client certs not be present causes an error. 
""" done_receiving = Event() def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] try: self._wrap_in_ssl(sock) except ssl.SSLError: pass done_receiving.wait(5) sock.close() self._start_server(socket_handler) with HTTPSConnectionPool( self.host, self.port, cert_reqs="REQUIRED", ca_certs=self.ca_path ) as pool: with pytest.raises(MaxRetryError): pool.request("GET", "/", retries=0) done_receiving.set() done_receiving.set() def test_client_cert_with_string_password(self) -> None: self.run_client_cert_with_password_test("letmein") def test_client_cert_with_bytes_password(self) -> None: self.run_client_cert_with_password_test(b"letmein") def run_client_cert_with_password_test(self, password: bytes | str) -> None: """ Tests client certificate password functionality """ done_receiving = Event() client_certs = [] def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] sock = self._wrap_in_ssl(sock) client_certs.append(sock.getpeercert()) data = b"" while not data.endswith(b"\r\n\r\n"): data += sock.recv(8192) sock.sendall( b"HTTP/1.1 200 OK\r\n" b"Server: testsocket\r\n" b"Connection: close\r\n" b"Content-Length: 6\r\n" b"\r\n" b"Valid!" 
) done_receiving.wait(5) sock.close() self._start_server(socket_handler) assert ssl_.SSLContext is not None ssl_context = ssl_.SSLContext(ssl_.PROTOCOL_SSLv23) ssl_context.load_cert_chain( certfile=self.cert_path, keyfile=self.password_key_path, password=password ) with HTTPSConnectionPool( self.host, self.port, ssl_context=ssl_context, cert_reqs="REQUIRED", ca_certs=self.ca_path, ) as pool: pool.request("GET", "/", retries=0) done_receiving.set() assert len(client_certs) == 1 def test_load_keyfile_with_invalid_password(self) -> None: assert ssl_.SSLContext is not None context = ssl_.SSLContext(ssl_.PROTOCOL_SSLv23) with pytest.raises(ssl.SSLError): context.load_cert_chain( certfile=self.cert_path, keyfile=self.password_key_path, password=b"letmei", ) def test_load_invalid_cert_file(self) -> None: assert ssl_.SSLContext is not None context = ssl_.SSLContext(ssl_.PROTOCOL_SSLv23) with pytest.raises(ssl.SSLError): context.load_cert_chain(certfile=self.password_key_path) class TestSocketClosing(SocketDummyServerTestCase): def test_recovery_when_server_closes_connection(self) -> None: # Does the pool work seamlessly if an open connection in the # connection pool gets hung up on by the server, then reaches # the front of the queue again? 
done_closing = Event() def socket_handler(listener: socket.socket) -> None: for i in 0, 1: sock = listener.accept()[0] buf = b"" while not buf.endswith(b"\r\n\r\n"): buf = sock.recv(65536) body = f"Response {int(i)}" sock.send( ( "HTTP/1.1 200 OK\r\n" "Content-Type: text/plain\r\n" "Content-Length: %d\r\n" "\r\n" "%s" % (len(body), body) ).encode("utf-8") ) sock.close() # simulate a server timing out, closing socket done_closing.set() # let the test know it can proceed self._start_server(socket_handler) with HTTPConnectionPool(self.host, self.port) as pool: response = pool.request("GET", "/", retries=0) assert response.status == 200 assert response.data == b"Response 0" done_closing.wait() # wait until the socket in our pool gets closed response = pool.request("GET", "/", retries=0) assert response.status == 200 assert response.data == b"Response 1" def test_connection_refused(self) -> None: # Does the pool retry if there is no listener on the port? host, port = get_unreachable_address() with HTTPConnectionPool(host, port, maxsize=3, block=True) as http: with pytest.raises(MaxRetryError): http.request("GET", "/", retries=0, release_conn=False) assert http.pool is not None assert http.pool.qsize() == http.pool.maxsize def test_connection_read_timeout(self) -> None: timed_out = Event() def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] while not sock.recv(65536).endswith(b"\r\n\r\n"): pass timed_out.wait() sock.close() self._start_server(socket_handler) with HTTPConnectionPool( self.host, self.port, timeout=SHORT_TIMEOUT, retries=False, maxsize=3, block=True, ) as http: try: with pytest.raises(ReadTimeoutError): http.request("GET", "/", release_conn=False) finally: timed_out.set() assert http.pool is not None assert http.pool.qsize() == http.pool.maxsize def test_read_timeout_dont_retry_method_not_in_allowlist(self) -> None: timed_out = Event() def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] 
sock.recv(65536) timed_out.wait() sock.close() self._start_server(socket_handler) with HTTPConnectionPool( self.host, self.port, timeout=LONG_TIMEOUT, retries=True ) as pool: try: with pytest.raises(ReadTimeoutError): pool.request("POST", "/") finally: timed_out.set() def test_https_connection_read_timeout(self) -> None: """Handshake timeouts should fail with a Timeout""" timed_out = Event() def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] while not sock.recv(65536): pass timed_out.wait() sock.close() # first ReadTimeoutError due to SocketTimeout self._start_server(socket_handler) with HTTPSConnectionPool( self.host, self.port, timeout=LONG_TIMEOUT, retries=False ) as pool: try: with pytest.raises(ReadTimeoutError): pool.request("GET", "/") finally: timed_out.set() # second ReadTimeoutError due to errno with HTTPSConnectionPool(host=self.host): err = OSError() err.errno = errno.EAGAIN with pytest.raises(ReadTimeoutError): pool._raise_timeout(err, "", 0) def test_timeout_errors_cause_retries(self) -> None: def socket_handler(listener: socket.socket) -> None: sock_timeout = listener.accept()[0] # Wait for a second request before closing the first socket. sock = listener.accept()[0] sock_timeout.close() # Second request. buf = b"" while not buf.endswith(b"\r\n\r\n"): buf += sock.recv(65536) # Now respond immediately. body = "Response 2" sock.send( ( "HTTP/1.1 200 OK\r\n" "Content-Type: text/plain\r\n" "Content-Length: %d\r\n" "\r\n" "%s" % (len(body), body) ).encode("utf-8") ) sock.close() # In situations where the main thread throws an exception, the server # thread can hang on an accept() call. This ensures everything times # out within 1 second. 
This should be long enough for any socket # operations in the test suite to complete default_timeout = socket.getdefaulttimeout() socket.setdefaulttimeout(1) try: self._start_server(socket_handler) t = Timeout(connect=LONG_TIMEOUT, read=LONG_TIMEOUT) with HTTPConnectionPool(self.host, self.port, timeout=t) as pool: response = pool.request("GET", "/", retries=1) assert response.status == 200 assert response.data == b"Response 2" finally: socket.setdefaulttimeout(default_timeout) def test_delayed_body_read_timeout(self) -> None: timed_out = Event() def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] buf = b"" body = "Hi" while not buf.endswith(b"\r\n\r\n"): buf = sock.recv(65536) sock.send( ( "HTTP/1.1 200 OK\r\n" "Content-Type: text/plain\r\n" "Content-Length: %d\r\n" "\r\n" % len(body) ).encode("utf-8") ) timed_out.wait() sock.send(body.encode("utf-8")) sock.close() self._start_server(socket_handler) with HTTPConnectionPool(self.host, self.port) as pool: response = pool.urlopen( "GET", "/", retries=0, preload_content=False, timeout=Timeout(connect=1, read=LONG_TIMEOUT), ) try: with pytest.raises(ReadTimeoutError): response.read() finally: timed_out.set() def test_delayed_body_read_timeout_with_preload(self) -> None: timed_out = Event() def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] buf = b"" body = "Hi" while not buf.endswith(b"\r\n\r\n"): buf += sock.recv(65536) sock.send( ( "HTTP/1.1 200 OK\r\n" "Content-Type: text/plain\r\n" "Content-Length: %d\r\n" "\r\n" % len(body) ).encode("utf-8") ) timed_out.wait(5) sock.close() self._start_server(socket_handler) with HTTPConnectionPool(self.host, self.port) as pool: try: with pytest.raises(ReadTimeoutError): timeout = Timeout(connect=LONG_TIMEOUT, read=SHORT_TIMEOUT) pool.urlopen("GET", "/", retries=False, timeout=timeout) finally: timed_out.set() def test_incomplete_response(self) -> None: body = "Response" partial_body = body[:2] def socket_handler(listener: 
socket.socket) -> None: sock = listener.accept()[0] # Consume request buf = b"" while not buf.endswith(b"\r\n\r\n"): buf = sock.recv(65536) # Send partial response and close socket. sock.send( ( "HTTP/1.1 200 OK\r\n" "Content-Type: text/plain\r\n" "Content-Length: %d\r\n" "\r\n" "%s" % (len(body), partial_body) ).encode("utf-8") ) sock.close() self._start_server(socket_handler) with HTTPConnectionPool(self.host, self.port) as pool: response = pool.request("GET", "/", retries=0, preload_content=False) with pytest.raises(ProtocolError): response.read() def test_retry_weird_http_version(self) -> None: """Retry class should handle httplib.BadStatusLine errors properly""" def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] # First request. # Pause before responding so the first request times out. buf = b"" while not buf.endswith(b"\r\n\r\n"): buf += sock.recv(65536) # send unknown http protocol body = "bad http 0.5 response" sock.send( ( "HTTP/0.5 200 OK\r\n" "Content-Type: text/plain\r\n" "Content-Length: %d\r\n" "\r\n" "%s" % (len(body), body) ).encode("utf-8") ) sock.close() # Second request. sock = listener.accept()[0] buf = b"" while not buf.endswith(b"\r\n\r\n"): buf += sock.recv(65536) # Now respond immediately. sock.send( ( "HTTP/1.1 200 OK\r\n" "Content-Type: text/plain\r\n" "Content-Length: %d\r\n" "\r\n" "foo" % (len("foo")) ).encode("utf-8") ) sock.close() # Close the socket. 
self._start_server(socket_handler) with HTTPConnectionPool(self.host, self.port) as pool: retry = Retry(read=1) response = pool.request("GET", "/", retries=retry) assert response.status == 200 assert response.data == b"foo" def test_connection_cleanup_on_read_timeout(self) -> None: timed_out = Event() def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] buf = b"" body = "Hi" while not buf.endswith(b"\r\n\r\n"): buf = sock.recv(65536) sock.send( ( "HTTP/1.1 200 OK\r\n" "Content-Type: text/plain\r\n" "Content-Length: %d\r\n" "\r\n" % len(body) ).encode("utf-8") ) timed_out.wait() sock.close() self._start_server(socket_handler) with HTTPConnectionPool(self.host, self.port) as pool: assert pool.pool is not None poolsize = pool.pool.qsize() response = pool.urlopen( "GET", "/", retries=0, preload_content=False, timeout=LONG_TIMEOUT ) try: with pytest.raises(ReadTimeoutError): response.read() assert poolsize == pool.pool.qsize() finally: timed_out.set() def test_connection_cleanup_on_protocol_error_during_read(self) -> None: body = "Response" partial_body = body[:2] def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] # Consume request buf = b"" while not buf.endswith(b"\r\n\r\n"): buf = sock.recv(65536) # Send partial response and close socket. 
sock.send( ( "HTTP/1.1 200 OK\r\n" "Content-Type: text/plain\r\n" "Content-Length: %d\r\n" "\r\n" "%s" % (len(body), partial_body) ).encode("utf-8") ) sock.close() self._start_server(socket_handler) with HTTPConnectionPool(self.host, self.port) as pool: assert pool.pool is not None poolsize = pool.pool.qsize() response = pool.request("GET", "/", retries=0, preload_content=False) with pytest.raises(ProtocolError): response.read() assert poolsize == pool.pool.qsize() def test_connection_closed_on_read_timeout_preload_false(self) -> None: timed_out = Event() def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] # Consume request buf = b"" while not buf.endswith(b"\r\n\r\n"): buf = sock.recv(65535) # Send partial chunked response and then hang. sock.send( b"HTTP/1.1 200 OK\r\n" b"Content-Type: text/plain\r\n" b"Transfer-Encoding: chunked\r\n" b"\r\n" b"8\r\n" b"12345678\r\n" ) timed_out.wait(5) # Expect a new request, but keep hold of the old socket to avoid # leaking it. Because we don't want to hang this thread, we # actually use select.select to confirm that a new request is # coming in: this lets us time the thread out. rlist, _, _ = select.select([listener], [], [], 1) assert rlist new_sock = listener.accept()[0] # Consume request buf = b"" while not buf.endswith(b"\r\n\r\n"): buf = new_sock.recv(65535) # Send complete chunked response. new_sock.send( b"HTTP/1.1 200 OK\r\n" b"Content-Type: text/plain\r\n" b"Transfer-Encoding: chunked\r\n" b"\r\n" b"8\r\n" b"12345678\r\n" b"0\r\n\r\n" ) new_sock.close() sock.close() self._start_server(socket_handler) with HTTPConnectionPool(self.host, self.port) as pool: # First request should fail. response = pool.urlopen( "GET", "/", retries=0, preload_content=False, timeout=LONG_TIMEOUT ) try: with pytest.raises(ReadTimeoutError): response.read() finally: timed_out.set() # Second should succeed. 
response = pool.urlopen( "GET", "/", retries=0, preload_content=False, timeout=LONG_TIMEOUT ) assert len(response.read()) == 8 def test_closing_response_actually_closes_connection(self) -> None: done_closing = Event() complete = Event() def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] buf = b"" while not buf.endswith(b"\r\n\r\n"): buf = sock.recv(65536) sock.send( b"HTTP/1.1 200 OK\r\n" b"Content-Type: text/plain\r\n" b"Content-Length: 0\r\n" b"\r\n" ) # Wait for the socket to close. done_closing.wait(timeout=LONG_TIMEOUT) # Look for the empty string to show that the connection got closed. # Don't get stuck in a timeout. sock.settimeout(LONG_TIMEOUT) new_data = sock.recv(65536) assert not new_data sock.close() complete.set() self._start_server(socket_handler) with HTTPConnectionPool(self.host, self.port) as pool: response = pool.request("GET", "/", retries=0, preload_content=False) assert response.status == 200 response.close() done_closing.set() # wait until the socket in our pool gets closed successful = complete.wait(timeout=LONG_TIMEOUT) assert successful, "Timed out waiting for connection close" def test_release_conn_param_is_respected_after_timeout_retry(self) -> None: """For successful ```urlopen(release_conn=False)```, the connection isn't released, even after a retry. This test allows a retry: one request fails, the next request succeeds. This is a regression test for issue #651 [1], where the connection would be released if the initial request failed, even if a retry succeeded. [1] <https://github.com/urllib3/urllib3/issues/651> """ def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] consume_socket(sock) # Close the connection, without sending any response (not even the # HTTP status line). This will trigger a `Timeout` on the client, # inside `urlopen()`. sock.close() # Expect a new request. 
Because we don't want to hang this thread, # we actually use select.select to confirm that a new request is # coming in: this lets us time the thread out. rlist, _, _ = select.select([listener], [], [], 5) assert rlist sock = listener.accept()[0] consume_socket(sock) # Send complete chunked response. sock.send( b"HTTP/1.1 200 OK\r\n" b"Content-Type: text/plain\r\n" b"Transfer-Encoding: chunked\r\n" b"\r\n" b"8\r\n" b"12345678\r\n" b"0\r\n\r\n" ) sock.close() self._start_server(socket_handler) with HTTPConnectionPool(self.host, self.port, maxsize=1) as pool: # First request should fail, but the timeout and `retries=1` should # save it. response = pool.urlopen( "GET", "/", retries=1, release_conn=False, preload_content=False, timeout=LONG_TIMEOUT, ) # The connection should still be on the response object, and none # should be in the pool. We opened two though. assert pool.num_connections == 2 assert pool.pool is not None assert pool.pool.qsize() == 0 assert response.connection is not None # Consume the data. This should put the connection back. 
response.read() assert pool.pool.qsize() == 1 assert response.connection is None def test_socket_close_socket_then_file(self) -> None: quit_event = threading.Event() def consume_ssl_socket( listener: socket.socket, ) -> None: try: with listener.accept()[0] as sock, original_ssl_wrap_socket( sock, server_side=True, keyfile=DEFAULT_CERTS["keyfile"], certfile=DEFAULT_CERTS["certfile"], ca_certs=DEFAULT_CA, ) as ssl_sock: consume_socket(ssl_sock, quit_event=quit_event) except (ConnectionResetError, ConnectionAbortedError, OSError): pass self._start_server(consume_ssl_socket, quit_event=quit_event) with socket.create_connection( (self.host, self.port) ) as sock, contextlib.closing( ssl_wrap_socket(sock, server_hostname=self.host, ca_certs=DEFAULT_CA) ) as ssl_sock, ssl_sock.makefile( "rb" ) as f: ssl_sock.close() f.close() with pytest.raises(OSError): ssl_sock.sendall(b"hello") assert ssl_sock.fileno() == -1 def test_socket_close_stays_open_with_makefile_open(self) -> None: quit_event = threading.Event() def consume_ssl_socket(listener: socket.socket) -> None: try: with listener.accept()[0] as sock, original_ssl_wrap_socket( sock, server_side=True, keyfile=DEFAULT_CERTS["keyfile"], certfile=DEFAULT_CERTS["certfile"], ca_certs=DEFAULT_CA, ) as ssl_sock: consume_socket(ssl_sock, quit_event=quit_event) except (ConnectionResetError, ConnectionAbortedError, OSError): pass self._start_server(consume_ssl_socket, quit_event=quit_event) with socket.create_connection( (self.host, self.port) ) as sock, contextlib.closing( ssl_wrap_socket(sock, server_hostname=self.host, ca_certs=DEFAULT_CA) ) as ssl_sock, ssl_sock.makefile( "rb" ): ssl_sock.close() ssl_sock.close() ssl_sock.sendall(b"hello") assert ssl_sock.fileno() > 0 class TestProxyManager(SocketDummyServerTestCase): def test_simple(self) -> None: def echo_socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] buf = b"" while not buf.endswith(b"\r\n\r\n"): buf += sock.recv(65536) sock.send( ( "HTTP/1.1 200 
OK\r\n" "Content-Type: text/plain\r\n" "Content-Length: %d\r\n" "\r\n" "%s" % (len(buf), buf.decode("utf-8")) ).encode("utf-8") ) sock.close() self._start_server(echo_socket_handler) base_url = f"http://{self.host}:{self.port}" with proxy_from_url(base_url) as proxy: r = proxy.request("GET", "http://google.com/") assert r.status == 200 # FIXME: The order of the headers is not predictable right now. We # should fix that someday (maybe when we migrate to # OrderedDict/MultiDict). assert sorted(r.data.split(b"\r\n")) == sorted( [ b"GET http://google.com/ HTTP/1.1", b"Host: google.com", b"Accept-Encoding: identity", b"Accept: */*", b"User-Agent: " + _get_default_user_agent().encode("utf-8"), b"", b"", ] ) def test_headers(self) -> None: def echo_socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] buf = b"" while not buf.endswith(b"\r\n\r\n"): buf += sock.recv(65536) sock.send( ( "HTTP/1.1 200 OK\r\n" "Content-Type: text/plain\r\n" "Content-Length: %d\r\n" "\r\n" "%s" % (len(buf), buf.decode("utf-8")) ).encode("utf-8") ) sock.close() self._start_server(echo_socket_handler) base_url = f"http://{self.host}:{self.port}" # Define some proxy headers. proxy_headers = HTTPHeaderDict({"For The Proxy": "YEAH!"}) with proxy_from_url(base_url, proxy_headers=proxy_headers) as proxy: conn = proxy.connection_from_url("http://www.google.com/") r = conn.urlopen("GET", "http://www.google.com/", assert_same_host=False) assert r.status == 200 # FIXME: The order of the headers is not predictable right now. We # should fix that someday (maybe when we migrate to # OrderedDict/MultiDict). 
assert b"For The Proxy: YEAH!\r\n" in r.data def test_retries(self) -> None: close_event = Event() def echo_socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] # First request, which should fail sock.close() # Second request sock = listener.accept()[0] buf = b"" while not buf.endswith(b"\r\n\r\n"): buf += sock.recv(65536) sock.send( ( "HTTP/1.1 200 OK\r\n" "Content-Type: text/plain\r\n" "Content-Length: %d\r\n" "\r\n" "%s" % (len(buf), buf.decode("utf-8")) ).encode("utf-8") ) sock.close() close_event.set() self._start_server(echo_socket_handler) base_url = f"http://{self.host}:{self.port}" with proxy_from_url(base_url) as proxy: conn = proxy.connection_from_url("http://www.google.com") r = conn.urlopen( "GET", "http://www.google.com", assert_same_host=False, retries=1 ) assert r.status == 200 close_event.wait(timeout=LONG_TIMEOUT) with pytest.raises(ProxyError): conn.urlopen( "GET", "http://www.google.com", assert_same_host=False, retries=False, ) def test_connect_reconn(self) -> None: def proxy_ssl_one(listener: socket.socket) -> None: sock = listener.accept()[0] buf = b"" while not buf.endswith(b"\r\n\r\n"): buf += sock.recv(65536) s = buf.decode("utf-8") if not s.startswith("CONNECT "): sock.send(b"HTTP/1.1 405 Method not allowed\r\nAllow: CONNECT\r\n\r\n") sock.close() return if not s.startswith(f"CONNECT {self.host}:443"): sock.send(b"HTTP/1.1 403 Forbidden\r\n\r\n") sock.close() return sock.send(b"HTTP/1.1 200 Connection Established\r\n\r\n") ssl_sock = original_ssl_wrap_socket( sock, server_side=True, keyfile=DEFAULT_CERTS["keyfile"], certfile=DEFAULT_CERTS["certfile"], ca_certs=DEFAULT_CA, ) buf = b"" while not buf.endswith(b"\r\n\r\n"): buf += ssl_sock.recv(65536) ssl_sock.send( b"HTTP/1.1 200 OK\r\n" b"Content-Type: text/plain\r\n" b"Content-Length: 2\r\n" b"Connection: close\r\n" b"\r\n" b"Hi" ) ssl_sock.close() def echo_socket_handler(listener: socket.socket) -> None: proxy_ssl_one(listener) proxy_ssl_one(listener) 
self._start_server(echo_socket_handler) base_url = f"http://{self.host}:{self.port}" with proxy_from_url(base_url, ca_certs=DEFAULT_CA) as proxy: url = f"https://{self.host}" conn = proxy.connection_from_url(url) r = conn.urlopen("GET", url, retries=0) assert r.status == 200 r = conn.urlopen("GET", url, retries=0) assert r.status == 200 def test_connect_ipv6_addr(self) -> None: ipv6_addr = "2001:4998:c:a06::2:4008" def echo_socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] buf = b"" while not buf.endswith(b"\r\n\r\n"): buf += sock.recv(65536) s = buf.decode("utf-8") if s.startswith(f"CONNECT [{ipv6_addr}]:443"): sock.send(b"HTTP/1.1 200 Connection Established\r\n\r\n") ssl_sock = original_ssl_wrap_socket( sock, server_side=True, keyfile=DEFAULT_CERTS["keyfile"], certfile=DEFAULT_CERTS["certfile"], ) buf = b"" while not buf.endswith(b"\r\n\r\n"): buf += ssl_sock.recv(65536) ssl_sock.send( b"HTTP/1.1 200 OK\r\n" b"Content-Type: text/plain\r\n" b"Content-Length: 2\r\n" b"Connection: close\r\n" b"\r\n" b"Hi" ) ssl_sock.close() else: sock.close() self._start_server(echo_socket_handler) base_url = f"http://{self.host}:{self.port}" with proxy_from_url(base_url, cert_reqs="NONE") as proxy: url = f"https://[{ipv6_addr}]" conn = proxy.connection_from_url(url) try: with pytest.warns(InsecureRequestWarning): r = conn.urlopen("GET", url, retries=0) assert r.status == 200 except MaxRetryError: pytest.fail("Invalid IPv6 format in HTTP CONNECT request") @pytest.mark.parametrize("target_scheme", ["http", "https"]) def test_https_proxymanager_connected_to_http_proxy( self, target_scheme: str ) -> None: errored = Event() def http_socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] sock.send(b"HTTP/1.0 501 Not Implemented\r\nConnection: close\r\n\r\n") errored.wait() sock.close() self._start_server(http_socket_handler) base_url = f"https://{self.host}:{self.port}" with ProxyManager(base_url, cert_reqs="NONE") as proxy: with 
pytest.raises(MaxRetryError) as e: proxy.request("GET", f"{target_scheme}://example.com", retries=0) errored.set() # Avoid a ConnectionAbortedError on Windows. assert type(e.value.reason) is ProxyError assert "Your proxy appears to only use HTTP and not HTTPS" in str( e.value.reason ) class TestSSL(SocketDummyServerTestCase): def test_ssl_failure_midway_through_conn(self) -> None: def socket_handler(listener: socket.socket) -> None: with listener.accept()[0] as sock, sock.dup() as sock2: ssl_sock = original_ssl_wrap_socket( sock, server_side=True, keyfile=DEFAULT_CERTS["keyfile"], certfile=DEFAULT_CERTS["certfile"], ca_certs=DEFAULT_CA, ) buf = b"" while not buf.endswith(b"\r\n\r\n"): buf += ssl_sock.recv(65536) # Deliberately send from the non-SSL socket. sock2.send( b"HTTP/1.1 200 OK\r\n" b"Content-Type: text/plain\r\n" b"Content-Length: 2\r\n" b"\r\n" b"Hi" ) ssl_sock.close() self._start_server(socket_handler) with HTTPSConnectionPool(self.host, self.port, ca_certs=DEFAULT_CA) as pool: with pytest.raises( SSLError, match=r"(wrong version number|record overflow|record layer failure)", ): pool.request("GET", "/", retries=False) def test_ssl_read_timeout(self) -> None: timed_out = Event() def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] # disable Nagle's algorithm so there's no delay in sending a partial body sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, True) ssl_sock = original_ssl_wrap_socket( sock, server_side=True, keyfile=DEFAULT_CERTS["keyfile"], certfile=DEFAULT_CERTS["certfile"], ) buf = b"" while not buf.endswith(b"\r\n\r\n"): buf += ssl_sock.recv(65536) # Send incomplete message (note Content-Length) ssl_sock.send( b"HTTP/1.1 200 OK\r\n" b"Content-Type: text/plain\r\n" b"Content-Length: 10\r\n" b"\r\n" b"Hi-" ) timed_out.wait() sock.close() ssl_sock.close() self._start_server(socket_handler) with HTTPSConnectionPool(self.host, self.port, ca_certs=DEFAULT_CA) as pool: response = pool.urlopen( "GET", "/", 
retries=0, preload_content=False, timeout=LONG_TIMEOUT ) try: with pytest.raises(ReadTimeoutError): response.read() finally: timed_out.set() def test_ssl_failed_fingerprint_verification(self) -> None: def socket_handler(listener: socket.socket) -> None: for i in range(2): sock = listener.accept()[0] try: ssl_sock = original_ssl_wrap_socket( sock, server_side=True, keyfile=DEFAULT_CERTS["keyfile"], certfile=DEFAULT_CERTS["certfile"], ca_certs=DEFAULT_CA, ) except (ssl.SSLError, ConnectionResetError, ConnectionAbortedError): pass else: with ssl_sock: try: ssl_sock.send( b"HTTP/1.1 200 OK\r\n" b"Content-Type: text/plain\r\n" b"Content-Length: 5\r\n\r\n" b"Hello" ) except (ssl.SSLEOFError, ConnectionResetError, BrokenPipeError): pass sock.close() self._start_server(socket_handler) # GitHub's fingerprint. Valid, but not matching. fingerprint = "A0:C4:A7:46:00:ED:A7:2D:C0:BE:CB:9A:8C:B6:07:CA:58:EE:74:5E" def request() -> None: pool = HTTPSConnectionPool( self.host, self.port, assert_fingerprint=fingerprint, cert_reqs="CERT_NONE", ) try: timeout = Timeout(connect=LONG_TIMEOUT, read=SHORT_TIMEOUT) response = pool.urlopen( "GET", "/", preload_content=False, retries=0, timeout=timeout ) response.read() finally: pool.close() with pytest.raises(MaxRetryError) as cm: request() assert type(cm.value.reason) is SSLError assert str(cm.value.reason) == ( "Fingerprints did not match. Expected " '"a0c4a74600eda72dc0becb9a8cb607ca58ee745e", got ' '"728b554c9afc1e88a11cad1bb2e7cc3edbc8f98a"' ) # Should not hang, see https://github.com/urllib3/urllib3/issues/529 with pytest.raises(MaxRetryError) as cm2: request() assert type(cm2.value.reason) is SSLError assert str(cm2.value.reason) == ( "Fingerprints did not match. 
Expected " '"a0c4a74600eda72dc0becb9a8cb607ca58ee745e", got ' '"728b554c9afc1e88a11cad1bb2e7cc3edbc8f98a"' ) def test_retry_ssl_error(self) -> None: def socket_handler(listener: socket.socket) -> None: # first request, trigger an SSLError sock = listener.accept()[0] sock2 = sock.dup() ssl_sock = original_ssl_wrap_socket( sock, server_side=True, keyfile=DEFAULT_CERTS["keyfile"], certfile=DEFAULT_CERTS["certfile"], ) buf = b"" while not buf.endswith(b"\r\n\r\n"): buf += ssl_sock.recv(65536) # Deliberately send from the non-SSL socket to trigger an SSLError sock2.send( b"HTTP/1.1 200 OK\r\n" b"Content-Type: text/plain\r\n" b"Content-Length: 4\r\n" b"\r\n" b"Fail" ) sock2.close() ssl_sock.close() # retried request sock = listener.accept()[0] ssl_sock = original_ssl_wrap_socket( sock, server_side=True, keyfile=DEFAULT_CERTS["keyfile"], certfile=DEFAULT_CERTS["certfile"], ) buf = b"" while not buf.endswith(b"\r\n\r\n"): buf += ssl_sock.recv(65536) ssl_sock.send( b"HTTP/1.1 200 OK\r\n" b"Content-Type: text/plain\r\n" b"Content-Length: 7\r\n\r\n" b"Success" ) ssl_sock.close() self._start_server(socket_handler) with HTTPSConnectionPool(self.host, self.port, ca_certs=DEFAULT_CA) as pool: response = pool.urlopen("GET", "/", retries=1) assert response.data == b"Success" def test_ssl_load_default_certs_when_empty(self) -> None: def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] try: ssl_sock = original_ssl_wrap_socket( sock, server_side=True, keyfile=DEFAULT_CERTS["keyfile"], certfile=DEFAULT_CERTS["certfile"], ca_certs=DEFAULT_CA, ) except (ssl.SSLError, OSError): return buf = b"" while not buf.endswith(b"\r\n\r\n"): buf += ssl_sock.recv(65536) ssl_sock.send( b"HTTP/1.1 200 OK\r\n" b"Content-Type: text/plain\r\n" b"Content-Length: 5\r\n\r\n" b"Hello" ) ssl_sock.close() sock.close() context = mock.create_autospec(ssl_.SSLContext) context.load_default_certs = mock.Mock() context.options = 0 class MockSSLSocket: def __init__( self, sock: 
socket.socket, *args: object, **kwargs: object ) -> None: self._sock = sock def close(self) -> None: self._sock.close() context.wrap_socket = MockSSLSocket with mock.patch("urllib3.util.ssl_.SSLContext", lambda *_, **__: context): self._start_server(socket_handler) with HTTPSConnectionPool(self.host, self.port) as pool: # Without a proper `SSLContext`, this request will fail in some # arbitrary way, but we only want to know if load_default_certs() was # called, which is why we accept any `Exception` here. with pytest.raises(Exception): pool.request("GET", "/", timeout=SHORT_TIMEOUT) context.load_default_certs.assert_called_with() def test_ssl_dont_load_default_certs_when_given(self) -> None: def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] try: ssl_sock = original_ssl_wrap_socket( sock, server_side=True, keyfile=DEFAULT_CERTS["keyfile"], certfile=DEFAULT_CERTS["certfile"], ca_certs=DEFAULT_CA, ) except (ssl.SSLError, OSError): return buf = b"" while not buf.endswith(b"\r\n\r\n"): buf += ssl_sock.recv(65536) ssl_sock.send( b"HTTP/1.1 200 OK\r\n" b"Content-Type: text/plain\r\n" b"Content-Length: 5\r\n\r\n" b"Hello" ) ssl_sock.close() sock.close() context = mock.create_autospec(ssl_.SSLContext) context.load_default_certs = mock.Mock() context.options = 0 class MockSSLSocket: def __init__( self, sock: socket.socket, *args: object, **kwargs: object ) -> None: self._sock = sock def close(self) -> None: self._sock.close() context.wrap_socket = MockSSLSocket with mock.patch("urllib3.util.ssl_.SSLContext", lambda *_, **__: context): for kwargs in [ {"ca_certs": "/a"}, {"ca_cert_dir": "/a"}, {"ca_certs": "a", "ca_cert_dir": "a"}, {"ssl_context": context}, ]: self._start_server(socket_handler) with HTTPSConnectionPool(self.host, self.port, **kwargs) as pool: with pytest.raises(Exception): pool.request("GET", "/", timeout=SHORT_TIMEOUT) context.load_default_certs.assert_not_called() def test_load_verify_locations_exception(self) -> None: """ 
Ensure that load_verify_locations raises SSLError for all backends """ with pytest.raises(SSLError): ssl_wrap_socket(None, ca_certs="/tmp/fake-file") # type: ignore[call-overload] def test_ssl_custom_validation_failure_terminates(self, tmpdir: Path) -> None: """ Ensure that the underlying socket is terminated if custom validation fails. """ server_closed = Event() def is_closed_socket(sock: socket.socket) -> bool: try: sock.settimeout(SHORT_TIMEOUT) except OSError: return True return False def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] try: _ = original_ssl_wrap_socket( sock, server_side=True, keyfile=DEFAULT_CERTS["keyfile"], certfile=DEFAULT_CERTS["certfile"], ca_certs=DEFAULT_CA, ) except ConnectionResetError: return except ssl.SSLError as e: assert "alert unknown ca" in str(e) if is_closed_socket(sock): server_closed.set() self._start_server(socket_handler) # client uses a different ca other_ca = trustme.CA() other_ca_path = str(tmpdir / "ca.pem") other_ca.cert_pem.write_to_path(other_ca_path) with HTTPSConnectionPool( self.host, self.port, cert_reqs="REQUIRED", ca_certs=other_ca_path ) as pool: with pytest.raises(SSLError): pool.request("GET", "/", retries=False, timeout=LONG_TIMEOUT) assert server_closed.wait(LONG_TIMEOUT), "The socket was not terminated" def _run_preload(self, pool: HTTPSConnectionPool, content_length: int) -> None: response = pool.request("GET", "/") assert len(response.data) == content_length def _run_read_None(self, pool: HTTPSConnectionPool, content_length: int) -> None: response = pool.request("GET", "/", preload_content=False) assert len(response.read(None)) == content_length assert response.read(None) == b"" def _run_read_amt(self, pool: HTTPSConnectionPool, content_length: int) -> None: response = pool.request("GET", "/", preload_content=False) assert len(response.read(content_length)) == content_length assert response.read(5) == b"" def _run_read1_None(self, pool: HTTPSConnectionPool, 
content_length: int) -> None: response = pool.request("GET", "/", preload_content=False) remaining = content_length while True: chunk = response.read1(None) if not chunk: break remaining -= len(chunk) assert remaining == 0 def _run_read1_amt(self, pool: HTTPSConnectionPool, content_length: int) -> None: response = pool.request("GET", "/", preload_content=False) remaining = content_length while True: chunk = response.read1(content_length) if not chunk: break remaining -= len(chunk) assert remaining == 0 @pytest.mark.integration @pytest.mark.parametrize( "method", [_run_preload, _run_read_None, _run_read_amt, _run_read1_None, _run_read1_amt], ) def test_requesting_large_resources_via_ssl( self, method: typing.Callable[[typing.Any, HTTPSConnectionPool, int], None] ) -> None: """ Ensure that it is possible to read 2 GiB or more via an SSL socket. https://github.com/urllib3/urllib3/issues/2513 """ content_length = 2**31 # (`int` max value in C) + 1. ssl_ready = Event() def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] ssl_sock = original_ssl_wrap_socket( sock, server_side=True, keyfile=DEFAULT_CERTS["keyfile"], certfile=DEFAULT_CERTS["certfile"], ca_certs=DEFAULT_CA, ) ssl_ready.set() while not ssl_sock.recv(65536).endswith(b"\r\n\r\n"): continue ssl_sock.send( b"HTTP/1.1 200 OK\r\n" b"Content-Type: text/plain\r\n" b"Content-Length: %d\r\n\r\n" % content_length ) chunks = 2 for i in range(chunks): ssl_sock.sendall(bytes(content_length // chunks)) ssl_sock.close() sock.close() self._start_server(socket_handler) ssl_ready.wait(5) with HTTPSConnectionPool( self.host, self.port, ca_certs=DEFAULT_CA, retries=False ) as pool: method(self, pool, content_length) class TestErrorWrapping(SocketDummyServerTestCase): def test_bad_statusline(self) -> None: self.start_response_handler( b"HTTP/1.1 Omg What Is This?\r\n" b"Content-Length: 0\r\n" b"\r\n" ) with HTTPConnectionPool(self.host, self.port, retries=False) as pool: with 
pytest.raises(ProtocolError): pool.request("GET", "/") def test_unknown_protocol(self) -> None: self.start_response_handler( b"HTTP/1000 200 OK\r\n" b"Content-Length: 0\r\n" b"\r\n" ) with HTTPConnectionPool(self.host, self.port, retries=False) as pool: with pytest.raises(ProtocolError): pool.request("GET", "/") class TestHeaders(SocketDummyServerTestCase): def test_httplib_headers_case_insensitive(self) -> None: self.start_response_handler( b"HTTP/1.1 200 OK\r\n" b"Content-Length: 0\r\n" b"Content-type: text/plain\r\n" b"\r\n" ) with HTTPConnectionPool(self.host, self.port, retries=False) as pool: HEADERS = {"Content-Length": "0", "Content-type": "text/plain"} r = pool.request("GET", "/") assert HEADERS == dict(r.headers.items()) # to preserve case sensitivity def start_parsing_handler(self) -> None: self.parsed_headers: typing.OrderedDict[str, str] = OrderedDict() self.received_headers: list[bytes] = [] def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] buf = b"" while not buf.endswith(b"\r\n\r\n"): buf += sock.recv(65536) self.received_headers = [ header for header in buf.split(b"\r\n")[1:] if header ] for header in self.received_headers: (key, value) = header.split(b": ") self.parsed_headers[key.decode("ascii")] = value.decode("ascii") sock.send(b"HTTP/1.1 204 No Content\r\nContent-Length: 0\r\n\r\n") sock.close() self._start_server(socket_handler) def test_headers_are_sent_with_the_original_case(self) -> None: headers = {"foo": "bar", "bAz": "quux"} self.start_parsing_handler() expected_headers = { "Accept-Encoding": "identity", "Host": f"{self.host}:{self.port}", "User-Agent": _get_default_user_agent(), } expected_headers.update(headers) with HTTPConnectionPool(self.host, self.port, retries=False) as pool: pool.request("GET", "/", headers=HTTPHeaderDict(headers)) assert expected_headers == self.parsed_headers def test_ua_header_can_be_overridden(self) -> None: headers = {"uSeR-AgENt": "Definitely not urllib3!"} 
self.start_parsing_handler() expected_headers = { "Accept-Encoding": "identity", "Host": f"{self.host}:{self.port}", } expected_headers.update(headers) with HTTPConnectionPool(self.host, self.port, retries=False) as pool: pool.request("GET", "/", headers=HTTPHeaderDict(headers)) assert expected_headers == self.parsed_headers def test_request_headers_are_sent_in_the_original_order(self) -> None: # NOTE: Probability this test gives a false negative is 1/(K!) K = 16 # NOTE: Provide headers in non-sorted order (i.e. reversed) # so that if the internal implementation tries to sort them, # a change will be detected. expected_request_headers = [ (f"X-Header-{int(i)}", str(i)) for i in reversed(range(K)) ] def filter_non_x_headers( d: typing.OrderedDict[str, str] ) -> list[tuple[str, str]]: return [(k, v) for (k, v) in d.items() if k.startswith("X-Header-")] self.start_parsing_handler() with HTTPConnectionPool(self.host, self.port, retries=False) as pool: pool.request("GET", "/", headers=OrderedDict(expected_request_headers)) assert expected_request_headers == filter_non_x_headers(self.parsed_headers) @resolvesLocalhostFQDN() def test_request_host_header_ignores_fqdn_dot(self) -> None: self.start_parsing_handler() with HTTPConnectionPool(self.host + ".", self.port, retries=False) as pool: pool.request("GET", "/") self.assert_header_received( self.received_headers, "Host", f"{self.host}:{self.port}" ) def test_response_headers_are_returned_in_the_original_order(self) -> None: # NOTE: Probability this test gives a false negative is 1/(K!) K = 16 # NOTE: Provide headers in non-sorted order (i.e. reversed) # so that if the internal implementation tries to sort them, # a change will be detected. 
expected_response_headers = [ (f"X-Header-{int(i)}", str(i)) for i in reversed(range(K)) ] def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] buf = b"" while not buf.endswith(b"\r\n\r\n"): buf += sock.recv(65536) sock.send( b"HTTP/1.1 200 OK\r\n" + b"\r\n".join( [ (k.encode("utf8") + b": " + v.encode("utf8")) for (k, v) in expected_response_headers ] ) + b"\r\n" ) sock.close() self._start_server(socket_handler) with HTTPConnectionPool(self.host, self.port) as pool: r = pool.request("GET", "/", retries=0) actual_response_headers = [ (k, v) for (k, v) in r.headers.items() if k.startswith("X-Header-") ] assert expected_response_headers == actual_response_headers @pytest.mark.parametrize( "method_type, body_type", [ ("GET", None), ("POST", None), ("POST", "bytes"), ("POST", "bytes-io"), ], ) def test_headers_sent_with_add( self, method_type: str, body_type: str | None ) -> None: """ Confirm that when adding headers with combine=True that we simply append to the most recent value, rather than create a new header line. 
    def _test_broken_header_parsing(
        self, headers: list[bytes], unparsed_data_check: str | None = None
    ) -> None:
        # Serve a response whose trailing header lines are taken verbatim from
        # `headers`, then assert that urllib3 logged a "Failed to parse
        # headers" warning for this request.  If `unparsed_data_check` is
        # given, that text must also appear in the rendered log message.
        self.start_response_handler(
            (
                b"HTTP/1.1 200 OK\r\n"
                b"Content-Length: 0\r\n"
                b"Content-type: text/plain\r\n"
            )
            + b"\r\n".join(headers)
            + b"\r\n\r\n"
        )

        with HTTPConnectionPool(self.host, self.port, retries=False) as pool:
            with LogRecorder() as logs:
                pool.request("GET", "/")

            for record in logs:
                # The warning's first format argument is the request URL,
                # which ties the record to this specific pool/request.
                if (
                    "Failed to parse headers" in record.msg
                    and type(record.args) is tuple
                    and _url_from_pool(pool, "/") == record.args[0]
                ):
                    if (
                        unparsed_data_check is None
                        or unparsed_data_check in record.getMessage()
                    ):
                        return
            pytest.fail("Missing log about unparsed headers")
class TestHeaderParsingContentType(SocketDummyServerTestCase):
    """Well-formed Content-Type headers must not trigger parse warnings."""

    def _test_okay_header_parsing(self, header: bytes) -> None:
        """Serve `header` in a response; assert no 'Failed to parse' log record."""
        response = (
            (b"HTTP/1.1 200 OK\r\n" b"Content-Length: 0\r\n") + header + b"\r\n\r\n"
        )
        self.start_response_handler(response)

        with HTTPConnectionPool(self.host, self.port, retries=False) as pool:
            with LogRecorder() as logs:
                pool.request("GET", "/")

        for record in logs:
            assert "Failed to parse headers" not in record.msg

    def test_header_text_plain(self) -> None:
        self._test_okay_header_parsing(b"Content-type: text/plain")

    def test_header_message_rfc822(self) -> None:
        self._test_okay_header_parsing(b"Content-type: message/rfc822")
assert [] == list(r.stream()) class TestStream(SocketDummyServerTestCase): def test_stream_none_unchunked_response_does_not_hang(self) -> None: done_event = Event() def socket_handler(listener: socket.socket) -> None: sock = listener.accept()[0] buf = b"" while not buf.endswith(b"\r\n\r\n"): buf += sock.recv(65536) sock.send( b"HTTP/1.1 200 OK\r\n" b"Content-Length: 12\r\n" b"Content-type: text/plain\r\n" b"\r\n" b"hello, world" ) done_event.wait(5) sock.close() self._start_server(socket_handler) with HTTPConnectionPool(self.host, self.port, retries=False) as pool: r = pool.request("GET", "/", timeout=LONG_TIMEOUT, preload_content=False) # Stream should read to the end. assert [b"hello, world"] == list(r.stream(None)) done_event.set() def test_large_compressed_stream(self) -> None: done_event = Event() expected_total_length = 296085 def socket_handler(listener: socket.socket) -> None: compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS) data = compress.compress(b"x" * expected_total_length) data += compress.flush() sock = listener.accept()[0] buf = b"" while not buf.endswith(b"\r\n\r\n"): buf += sock.recv(65536) sock.sendall( b"HTTP/1.1 200 OK\r\n" b"Content-Length: %d\r\n" b"Content-Encoding: gzip\r\n" b"\r\n" % (len(data),) + data ) done_event.wait(5) sock.close() self._start_server(socket_handler) with HTTPConnectionPool(self.host, self.port, retries=False) as pool: r = pool.request("GET", "/", timeout=LONG_TIMEOUT, preload_content=False) # Chunks must all be equal or less than 10240 # and only the last chunk is allowed to be smaller # than 10240. 
class TestBadContentLength(SocketDummyServerTestCase):
    """Behavior when the declared Content-Length exceeds the body actually sent."""

    def test_enforce_content_length_get(self) -> None:
        done_event = Event()

        def socket_handler(listener: socket.socket) -> None:
            sock = listener.accept()[0]
            request = b""
            while not request.endswith(b"\r\n\r\n"):
                request += sock.recv(65536)
            # Declare 22 bytes but send only 12.
            sock.send(
                b"HTTP/1.1 200 OK\r\n"
                b"Content-Length: 22\r\n"
                b"Content-type: text/plain\r\n"
                b"\r\n"
                b"hello, world"
            )
            done_event.wait(LONG_TIMEOUT)
            sock.close()

        self._start_server(socket_handler)
        with HTTPConnectionPool(self.host, self.port, maxsize=1) as conn:
            # Streaming must raise once the short read is detected.
            get_response = conn.request(
                "GET", url="/", preload_content=False, enforce_content_length=True
            )
            data = get_response.stream(100)
            with pytest.raises(ProtocolError, match="12 bytes read, 10 more expected"):
                next(data)
            done_event.set()

    def test_enforce_content_length_no_body(self) -> None:
        done_event = Event()

        def socket_handler(listener: socket.socket) -> None:
            sock = listener.accept()[0]
            request = b""
            while not request.endswith(b"\r\n\r\n"):
                request += sock.recv(65536)
            # Headers only -- a HEAD response carries no body despite
            # advertising a Content-Length.
            sock.send(
                b"HTTP/1.1 200 OK\r\n"
                b"Content-Length: 22\r\n"
                b"Content-type: text/plain\r\n"
                b"\r\n"
            )
            done_event.wait(1)
            sock.close()

        self._start_server(socket_handler)
        with HTTPConnectionPool(self.host, self.port, maxsize=1) as conn:
            # A HEAD response must stream zero chunks, not wait for 22 bytes.
            head_response = conn.request(
                "HEAD", url="/", preload_content=False, enforce_content_length=True
            )
            assert list(head_response.stream(1)) == []
            done_event.set()
    @notWindows()
    def test_ignore_broken_pipe_errors(self, monkeypatch: pytest.MonkeyPatch) -> None:
        # On Windows an aborted connection raises an error on
        # attempts to read data out of a socket that's been closed.
        sock_shut = Event()
        orig_connect = HTTPConnection.connect
        # a buffer that will cause two sendall calls
        buf = "a" * 1024 * 1024 * 4

        def connect_and_wait(*args: typing.Any, **kw: typing.Any) -> None:
            # Connect normally, then block until the server has shut down its
            # end -- this guarantees the client writes into a dead connection.
            ret = orig_connect(*args, **kw)
            assert sock_shut.wait(5)
            return ret

        def socket_handler(listener: socket.socket) -> None:
            for i in range(2):
                sock = listener.accept()[0]
                # Respond immediately, then abort the connection before the
                # client has finished sending its (large) request body.
                sock.send(
                    b"HTTP/1.1 404 Not Found\r\n"
                    b"Connection: close\r\n"
                    b"Content-Length: 10\r\n"
                    b"\r\n"
                    b"xxxxxxxxxx"
                )
                sock.shutdown(socket.SHUT_RDWR)
                sock_shut.set()
                sock.close()

        monkeypatch.setattr(HTTPConnection, "connect", connect_and_wait)
        self._start_server(socket_handler)
        with HTTPConnectionPool(self.host, self.port) as pool:
            # Both framing styles (Content-Length body and chunked body) must
            # survive the broken pipe and still surface the server's response.
            r = pool.request("POST", "/", body=buf)
            assert r.status == 404
            assert r.headers["content-length"] == "10"
            assert r.data == b"xxxxxxxxxx"

            r = pool.request("POST", "/admin", chunked=True, body=buf)
            assert r.status == 404
            assert r.headers["content-length"] == "10"
            assert r.data == b"xxxxxxxxxx"
class TestMultipartResponse(SocketDummyServerTestCase):
    def test_multipart_assert_header_parsing_no_defects(self) -> None:
        """A multipart/mixed response must not trigger header-parsing warnings."""
        quit_event = threading.Event()

        def socket_handler(listener: socket.socket) -> None:
            for _ in range(2):
                listener.settimeout(LONG_TIMEOUT)

                # Accept a connection, bailing out if the test is tearing down.
                while True:
                    if quit_event.is_set():
                        return
                    try:
                        sock = listener.accept()[0]
                        break
                    except (TimeoutError, socket.timeout):
                        continue
                sock.settimeout(LONG_TIMEOUT)

                # Read until the request headers are complete.
                while True:
                    if quit_event.is_set():
                        sock.close()
                        return
                    if sock.recv(65536).endswith(b"\r\n\r\n"):
                        break

                sock.sendall(
                    b"HTTP/1.1 404 Not Found\r\n"
                    b"Server: example.com\r\n"
                    b"Content-Type: multipart/mixed; boundary=36eeb8c4e26d842a\r\n"
                    b"Content-Length: 73\r\n"
                    b"\r\n"
                    b"--36eeb8c4e26d842a\r\n"
                    b"Content-Type: text/plain\r\n"
                    b"\r\n"
                    b"1\r\n"
                    b"--36eeb8c4e26d842a--\r\n"
                )
                sock.close()

        self._start_server(socket_handler, quit_event=quit_event)
        from urllib3.connectionpool import log

        with mock.patch.object(log, "warning") as log_warning:
            with HTTPConnectionPool(self.host, self.port, timeout=LONG_TIMEOUT) as pool:
                resp = pool.urlopen("GET", "/")
                assert resp.status == 404
                assert (
                    resp.headers["content-type"]
                    == "multipart/mixed; boundary=36eeb8c4e26d842a"
                )
                assert len(resp.data) == 73
            log_warning.assert_not_called()
    @pytest.mark.parametrize("chunked", [True, False])
    @pytest.mark.parametrize("method", ["POST", "PUT", "PATCH"])
    @pytest.mark.parametrize("body_type", ["file", "generator", "bytes"])
    def test_chunked_specified(
        self, method: str, chunked: bool, body_type: str
    ) -> None:
        # Whether requested explicitly (chunked=True) or implied by a
        # stream-like body, the request must go out with
        # Transfer-Encoding: chunked and without any Content-Length header.
        quit_event = threading.Event()
        buffer = bytearray()
        # "a" is the hex chunk-size line (10 bytes) for the body sent below.
        expected_bytes = b"\r\n\r\na\r\nxxxxxxxxxx\r\n0\r\n\r\n"

        def socket_handler(listener: socket.socket) -> None:
            nonlocal buffer
            listener.settimeout(LONG_TIMEOUT)
            # Accept one connection, giving up if the test is shutting down.
            while True:
                if quit_event.is_set():
                    return
                try:
                    sock = listener.accept()[0]
                    break
                except (TimeoutError, socket.timeout):
                    continue
            sock.settimeout(LONG_TIMEOUT)

            # Capture the raw request bytes until the full chunked body
            # (terminated by the 0-length chunk) has arrived.
            while expected_bytes not in buffer:
                if quit_event.is_set():
                    return
                with contextlib.suppress(BlockingIOError):
                    buffer += sock.recv(65536)

            sock.sendall(
                b"HTTP/1.1 200 OK\r\n"
                b"Server: example.com\r\n"
                b"Content-Length: 0\r\n\r\n"
            )
            sock.close()

        self._start_server(socket_handler, quit_event=quit_event)

        body: typing.Any
        if body_type == "generator":

            def body_generator() -> typing.Generator[bytes, None, None]:
                yield b"x" * 10

            body = body_generator()
        elif body_type == "file":
            body = io.BytesIO(b"x" * 10)
            body.seek(0, 0)
        else:
            if chunked is False:
                pytest.skip("urllib3 uses Content-Length in this case")
            body = b"x" * 10

        with HTTPConnectionPool(
            self.host, self.port, timeout=LONG_TIMEOUT, retries=False
        ) as pool:
            resp = pool.request(method, "/", chunked=chunked, body=body)
            assert resp.status == 200

        sent_bytes = bytes(buffer)
        # Five ':' bytes total: four header separators, with the
        # "Host: localhost:<port>" header contributing two of them.
        assert sent_bytes.count(b":") == 5
        assert b"Host: localhost:" in sent_bytes
        assert b"Accept-Encoding: identity\r\n" in sent_bytes
        assert b"Transfer-Encoding: chunked\r\n" in sent_bytes
        assert b"User-Agent: python-urllib3/" in sent_bytes
        assert b"content-length" not in sent_bytes.lower()
        assert expected_bytes in sent_bytes
sent_bytes.endswith(expected_bytes) @pytest.mark.parametrize( "header_transform", [str.lower, str.title, str.upper], ) @pytest.mark.parametrize( ["header", "header_value", "expected"], [ ("content-length", "10", b": 10\r\n\r\nxxxxxxxx"), ( "transfer-encoding", "chunked", b": chunked\r\n\r\n8\r\nxxxxxxxx\r\n0\r\n\r\n", ), ], ) def test_framing_set_via_headers( self, header_transform: typing.Callable[[str], str], header: str, header_value: str, expected: bytes, ) -> None: buffer = bytearray() def socket_handler(listener: socket.socket) -> None: nonlocal buffer sock = listener.accept()[0] sock.settimeout(0) while expected not in buffer: with contextlib.suppress(BlockingIOError): buffer += sock.recv(65536) sock.sendall( b"HTTP/1.1 200 OK\r\n" b"Server: example.com\r\n" b"Content-Length: 0\r\n\r\n" ) sock.close() self._start_server(socket_handler) with HTTPConnectionPool( self.host, self.port, timeout=LONG_TIMEOUT, retries=False ) as pool: resp = pool.request( "POST", "/", body=b"xxxxxxxx", headers={header_transform(header): header_value}, ) assert resp.status == 200 sent_bytes = bytes(buffer) assert sent_bytes.endswith(expected) with_dummyserver/test_no_ssl.py 0000644 00000002171 15025234504 0013070 0 ustar 00 """ Test connections without the builtin ssl module Note: Import urllib3 inside the test functions to get the importblocker to work """ from __future__ import annotations import pytest import urllib3 from dummyserver.testcase import ( HTTPSHypercornDummyServerTestCase, HypercornDummyServerTestCase, ) from urllib3.exceptions import InsecureRequestWarning from ..test_no_ssl import TestWithoutSSL class TestHTTPWithoutSSL(HypercornDummyServerTestCase, TestWithoutSSL): def test_simple(self) -> None: with urllib3.HTTPConnectionPool(self.host, self.port) as pool: r = pool.request("GET", "/") assert r.status == 200, r.data class TestHTTPSWithoutSSL(HTTPSHypercornDummyServerTestCase, TestWithoutSSL): def test_simple(self) -> None: with urllib3.HTTPSConnectionPool( 
self.host, self.port, cert_reqs="NONE" ) as pool: with pytest.warns(InsecureRequestWarning): try: pool.request("GET", "/") except urllib3.exceptions.SSLError as e: assert "SSL module is not available" in str(e) with_dummyserver/__init__.py 0000644 00000000000 15025234504 0012260 0 ustar 00 port_helpers.py 0000644 00000014116 15025234504 0007627 0 ustar 00 # These helpers are copied from test/support/socket_helper.py in the Python 3.9 standard # library test suite. from __future__ import annotations import socket # Don't use "localhost", since resolving it uses the DNS under recent # Windows versions (see issue #18792). HOST = "127.0.0.1" HOSTv6 = "::1" def find_unused_port( family: socket.AddressFamily = socket.AF_INET, socktype: socket.SocketKind = socket.SOCK_STREAM, ) -> int: """Returns an unused port that should be suitable for binding. This is achieved by creating a temporary socket with the same family and type as the 'sock' parameter (default is AF_INET, SOCK_STREAM), and binding it to the specified host address (defaults to 0.0.0.0) with the port set to 0, eliciting an unused ephemeral port from the OS. The temporary socket is then closed and deleted, and the ephemeral port is returned. Either this method or bind_port() should be used for any tests where a server socket needs to be bound to a particular port for the duration of the test. Which one to use depends on whether the calling code is creating a python socket, or if an unused port needs to be provided in a constructor or passed to an external program (i.e. the -accept argument to openssl's s_server mode). Always prefer bind_port() over find_unused_port() where possible. Hard coded ports should *NEVER* be used. As soon as a server socket is bound to a hard coded port, the ability to run multiple instances of the test simultaneously on the same host is compromised, which makes the test a ticking time bomb in a buildbot environment. 
On Unix buildbots, this may simply manifest as a failed test, which can be recovered from without intervention in most cases, but on Windows, the entire python process can completely and utterly wedge, requiring someone to log in to the buildbot and manually kill the affected process. (This is easy to reproduce on Windows, unfortunately, and can be traced to the SO_REUSEADDR socket option having different semantics on Windows versus Unix/Linux. On Unix, you can't have two AF_INET SOCK_STREAM sockets bind, listen and then accept connections on identical host/ports. An EADDRINUSE OSError will be raised at some point (depending on the platform and the order bind and listen were called on each socket). However, on Windows, if SO_REUSEADDR is set on the sockets, no EADDRINUSE will ever be raised when attempting to bind two identical host/ports. When accept() is called on each socket, the second caller's process will steal the port from the first caller, leaving them both in an awkwardly wedged state where they'll no longer respond to any signals or graceful kills, and must be forcibly killed via OpenProcess()/TerminateProcess(). The solution on Windows is to use the SO_EXCLUSIVEADDRUSE socket option instead of SO_REUSEADDR, which effectively affords the same semantics as SO_REUSEADDR on Unix. Given the propensity of Unix developers in the Open Source world compared to Windows ones, this is a common mistake. A quick look over OpenSSL's 0.9.8g source shows that they use SO_REUSEADDR when openssl.exe is called with the 's_server' option, for example. See http://bugs.python.org/issue2550 for more info. 
The following site also has a very thorough description about the implications of both REUSEADDR and EXCLUSIVEADDRUSE on Windows: http://msdn2.microsoft.com/en-us/library/ms740621(VS.85).aspx) XXX: although this approach is a vast improvement on previous attempts to elicit unused ports, it rests heavily on the assumption that the ephemeral port returned to us by the OS won't immediately be dished back out to some other process when we close and delete our temporary socket but before our calling code has a chance to bind the returned port. We can deal with this issue if/when we come across it.""" with socket.socket(family, socktype) as tempsock: port = bind_port(tempsock) del tempsock return port def bind_port(sock: socket.socket, host: str = HOST) -> int: """Bind the socket to a free port and return the port number. Relies on ephemeral ports in order to ensure we are using an unbound port. This is important as many tests may be running simultaneously, especially in a buildbot environment. This method raises an exception if the sock.family is AF_INET and sock.type is SOCK_STREAM, *and* the socket has SO_REUSEADDR or SO_REUSEPORT set on it. Tests should *never* set these socket options for TCP/IP sockets. The only case for setting these options is testing multicasting via multiple UDP sockets. Additionally, if the SO_EXCLUSIVEADDRUSE socket option is available (i.e. on Windows), it will be set on the socket. This will prevent anyone else from bind()'ing to our host/port for the duration of the test. """ if sock.family == socket.AF_INET and sock.type == socket.SOCK_STREAM: if hasattr(socket, "SO_REUSEADDR"): if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR) == 1: raise ValueError( "tests should never set the SO_REUSEADDR " "socket option on TCP/IP sockets!" ) if hasattr(socket, "SO_REUSEPORT"): try: if sock.getsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT) == 1: raise ValueError( "tests should never set the SO_REUSEPORT " "socket option on TCP/IP sockets!" 
) except OSError: # Python's socket module was compiled using modern headers # thus defining SO_REUSEPORT but this process is running # under an older kernel that does not support SO_REUSEPORT. pass if hasattr(socket, "SO_EXCLUSIVEADDRUSE"): sock.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) sock.bind((host, 0)) port = sock.getsockname()[1] assert isinstance(port, int) return port test_proxymanager.py 0000644 00000007111 15025234504 0010671 0 ustar 00 from __future__ import annotations import pytest from urllib3.exceptions import MaxRetryError, NewConnectionError, ProxyError from urllib3.poolmanager import ProxyManager from urllib3.util.retry import Retry from urllib3.util.url import parse_url from .port_helpers import find_unused_port class TestProxyManager: @pytest.mark.parametrize("proxy_scheme", ["http", "https"]) def test_proxy_headers(self, proxy_scheme: str) -> None: url = "http://pypi.org/project/urllib3/" proxy_url = f"{proxy_scheme}://something:1234" with ProxyManager(proxy_url) as p: # Verify default headers default_headers = {"Accept": "*/*", "Host": "pypi.org"} headers = p._set_proxy_headers(url) assert headers == default_headers # Verify default headers don't overwrite provided headers provided_headers = { "Accept": "application/json", "custom": "header", "Host": "test.python.org", } headers = p._set_proxy_headers(url, provided_headers) assert headers == provided_headers # Verify proxy with nonstandard port provided_headers = {"Accept": "application/json"} expected_headers = provided_headers.copy() expected_headers.update({"Host": "pypi.org:8080"}) url_with_port = "http://pypi.org:8080/project/urllib3/" headers = p._set_proxy_headers(url_with_port, provided_headers) assert headers == expected_headers def test_default_port(self) -> None: with ProxyManager("http://something") as p: assert p.proxy is not None assert p.proxy.port == 80 with ProxyManager("https://something") as p: assert p.proxy is not None assert p.proxy.port == 443 def 
test_invalid_scheme(self) -> None: with pytest.raises(AssertionError): ProxyManager("invalid://host/p") with pytest.raises(ValueError): ProxyManager("invalid://host/p") def test_proxy_tunnel(self) -> None: http_url = parse_url("http://example.com") https_url = parse_url("https://example.com") with ProxyManager("http://proxy:8080") as p: assert p._proxy_requires_url_absolute_form(http_url) assert p._proxy_requires_url_absolute_form(https_url) is False with ProxyManager("https://proxy:8080") as p: assert p._proxy_requires_url_absolute_form(http_url) assert p._proxy_requires_url_absolute_form(https_url) is False with ProxyManager("https://proxy:8080", use_forwarding_for_https=True) as p: assert p._proxy_requires_url_absolute_form(http_url) assert p._proxy_requires_url_absolute_form(https_url) def test_proxy_connect_retry(self) -> None: retry = Retry(total=None, connect=False) port = find_unused_port() with ProxyManager(f"http://localhost:{port}") as p: with pytest.raises(ProxyError) as ei: p.urlopen("HEAD", url="http://localhost/", retries=retry) assert isinstance(ei.value.original_error, NewConnectionError) retry = Retry(total=None, connect=2) with ProxyManager(f"http://localhost:{port}") as p: with pytest.raises(MaxRetryError) as ei1: p.urlopen("HEAD", url="http://localhost/", retries=retry) assert ei1.value.reason is not None assert isinstance(ei1.value.reason, ProxyError) assert isinstance(ei1.value.reason.original_error, NewConnectionError) test_ssl.py 0000644 00000017206 15025234504 0006764 0 ustar 00 from __future__ import annotations import ssl import typing from unittest import mock import pytest from urllib3.exceptions import ProxySchemeUnsupported, SSLError from urllib3.util import ssl_ class TestSSL: @pytest.mark.parametrize( "addr", [ # IPv6 "::1", "::", "FE80::8939:7684:D84b:a5A4%251", # IPv4 "127.0.0.1", "8.8.8.8", b"127.0.0.1", # IPv6 w/ Zone IDs "FE80::8939:7684:D84b:a5A4%251", b"FE80::8939:7684:D84b:a5A4%251", "FE80::8939:7684:D84b:a5A4%19", 
b"FE80::8939:7684:D84b:a5A4%19", ], ) def test_is_ipaddress_true(self, addr: bytes | str) -> None: assert ssl_.is_ipaddress(addr) @pytest.mark.parametrize( "addr", [ "www.python.org", b"www.python.org", "v2.sg.media-imdb.com", b"v2.sg.media-imdb.com", ], ) def test_is_ipaddress_false(self, addr: bytes | str) -> None: assert not ssl_.is_ipaddress(addr) def test_create_urllib3_context_set_ciphers( self, monkeypatch: pytest.MonkeyPatch ) -> None: ciphers = "ECDH+AESGCM:ECDH+CHACHA20" context = mock.create_autospec(ssl_.SSLContext) context.set_ciphers = mock.Mock() context.options = 0 monkeypatch.setattr(ssl_, "SSLContext", lambda *_, **__: context) assert ssl_.create_urllib3_context(ciphers=ciphers) is context assert context.set_ciphers.call_count == 1 assert context.set_ciphers.call_args == mock.call(ciphers) def test_create_urllib3_no_context(self) -> None: with mock.patch("urllib3.util.ssl_.SSLContext", None): with pytest.raises(TypeError): ssl_.create_urllib3_context() def test_wrap_socket_given_context_no_load_default_certs(self) -> None: context = mock.create_autospec(ssl_.SSLContext) context.load_default_certs = mock.Mock() sock = mock.Mock() ssl_.ssl_wrap_socket(sock, ssl_context=context) context.load_default_certs.assert_not_called() def test_wrap_socket_given_ca_certs_no_load_default_certs( self, monkeypatch: pytest.MonkeyPatch ) -> None: context = mock.create_autospec(ssl_.SSLContext) context.load_default_certs = mock.Mock() context.options = 0 monkeypatch.setattr(ssl_, "SSLContext", lambda *_, **__: context) sock = mock.Mock() ssl_.ssl_wrap_socket(sock, ca_certs="/tmp/fake-file") context.load_default_certs.assert_not_called() context.load_verify_locations.assert_called_with("/tmp/fake-file", None, None) def test_wrap_socket_default_loads_default_certs( self, monkeypatch: pytest.MonkeyPatch ) -> None: context = mock.create_autospec(ssl_.SSLContext) context.load_default_certs = mock.Mock() context.options = 0 monkeypatch.setattr(ssl_, "SSLContext", lambda 
*_, **__: context) sock = mock.Mock() ssl_.ssl_wrap_socket(sock) context.load_default_certs.assert_called_with() def test_wrap_socket_no_ssltransport(self) -> None: with mock.patch("urllib3.util.ssl_.SSLTransport", None): with pytest.raises(ProxySchemeUnsupported): sock = mock.Mock() ssl_.ssl_wrap_socket(sock, tls_in_tls=True) @pytest.mark.parametrize( ["pha", "expected_pha", "cert_reqs"], [ (None, None, None), (None, None, ssl.CERT_NONE), (None, None, ssl.CERT_OPTIONAL), (None, None, ssl.CERT_REQUIRED), (False, True, None), (False, True, ssl.CERT_NONE), (False, True, ssl.CERT_OPTIONAL), (False, True, ssl.CERT_REQUIRED), (True, True, None), (True, True, ssl.CERT_NONE), (True, True, ssl.CERT_OPTIONAL), (True, True, ssl.CERT_REQUIRED), ], ) def test_create_urllib3_context_pha( self, monkeypatch: pytest.MonkeyPatch, pha: bool | None, expected_pha: bool | None, cert_reqs: int | None, ) -> None: context = mock.create_autospec(ssl_.SSLContext) context.set_ciphers = mock.Mock() context.options = 0 context.post_handshake_auth = pha monkeypatch.setattr(ssl_, "SSLContext", lambda *_, **__: context) assert ssl_.create_urllib3_context(cert_reqs=cert_reqs) is context assert context.post_handshake_auth == expected_pha def test_create_urllib3_context_default_ciphers( self, monkeypatch: pytest.MonkeyPatch ) -> None: context = mock.create_autospec(ssl_.SSLContext) context.set_ciphers = mock.Mock() context.options = 0 monkeypatch.setattr(ssl_, "SSLContext", lambda *_, **__: context) ssl_.create_urllib3_context() context.set_ciphers.assert_not_called() @pytest.mark.parametrize( "kwargs", [ { "ssl_version": ssl.PROTOCOL_TLSv1, "ssl_minimum_version": ssl.TLSVersion.MINIMUM_SUPPORTED, }, { "ssl_version": ssl.PROTOCOL_TLSv1, "ssl_maximum_version": ssl.TLSVersion.TLSv1, }, { "ssl_version": ssl.PROTOCOL_TLSv1, "ssl_minimum_version": ssl.TLSVersion.MINIMUM_SUPPORTED, "ssl_maximum_version": ssl.TLSVersion.MAXIMUM_SUPPORTED, }, ], ) def 
test_create_urllib3_context_ssl_version_and_ssl_min_max_version_errors( self, kwargs: dict[str, typing.Any] ) -> None: with pytest.raises(ValueError) as e: ssl_.create_urllib3_context(**kwargs) assert str(e.value) == ( "Can't specify both 'ssl_version' and either 'ssl_minimum_version' or 'ssl_maximum_version'" ) @pytest.mark.parametrize( "kwargs", [ { "ssl_version": ssl.PROTOCOL_TLS, "ssl_minimum_version": ssl.TLSVersion.MINIMUM_SUPPORTED, }, { "ssl_version": ssl.PROTOCOL_TLS_CLIENT, "ssl_minimum_version": ssl.TLSVersion.MINIMUM_SUPPORTED, }, { "ssl_version": None, "ssl_minimum_version": ssl.TLSVersion.MINIMUM_SUPPORTED, }, ], ) def test_create_urllib3_context_ssl_version_and_ssl_min_max_version_no_warning( self, kwargs: dict[str, typing.Any] ) -> None: ssl_.create_urllib3_context(**kwargs) @pytest.mark.parametrize( "kwargs", [ {"ssl_version": ssl.PROTOCOL_TLSv1, "ssl_minimum_version": None}, {"ssl_version": ssl.PROTOCOL_TLSv1, "ssl_maximum_version": None}, { "ssl_version": ssl.PROTOCOL_TLSv1, "ssl_minimum_version": None, "ssl_maximum_version": None, }, ], ) def test_create_urllib3_context_ssl_version_and_ssl_min_max_version_no_error( self, kwargs: dict[str, typing.Any] ) -> None: with pytest.warns( DeprecationWarning, match=r"'ssl_version' option is deprecated and will be removed in " r"urllib3 v2\.1\.0\. 
Instead use 'ssl_minimum_version'", ): ssl_.create_urllib3_context(**kwargs) def test_assert_fingerprint_raises_exception_on_none_cert(self) -> None: with pytest.raises(SSLError): ssl_.assert_fingerprint( cert=None, fingerprint="55:39:BF:70:05:12:43:FA:1F:D1:BF:4E:E8:1B:07:1D" ) test_fields.py 0000644 00000010526 15025234504 0007427 0 ustar 00 from __future__ import annotations import pytest from urllib3.fields import ( RequestField, format_header_param, format_header_param_html5, format_header_param_rfc2231, format_multipart_header_param, guess_content_type, ) class TestRequestField: @pytest.mark.parametrize( "filename, content_types", [ ("image.jpg", ["image/jpeg", "image/pjpeg"]), ("notsure", ["application/octet-stream"]), (None, ["application/octet-stream"]), ], ) def test_guess_content_type( self, filename: str | None, content_types: list[str] ) -> None: assert guess_content_type(filename) in content_types def test_create(self) -> None: simple_field = RequestField("somename", "data") assert simple_field.render_headers() == "\r\n" filename_field = RequestField("somename", "data", filename="somefile.txt") assert filename_field.render_headers() == "\r\n" headers_field = RequestField( "somename", "data", headers={"Content-Length": "4"} ) assert headers_field.render_headers() == "Content-Length: 4\r\n\r\n" def test_make_multipart(self) -> None: field = RequestField("somename", "data") field.make_multipart(content_type="image/jpg", content_location="/test") assert ( field.render_headers() == 'Content-Disposition: form-data; name="somename"\r\n' "Content-Type: image/jpg\r\n" "Content-Location: /test\r\n" "\r\n" ) def test_make_multipart_empty_filename(self) -> None: field = RequestField("somename", "data", "") field.make_multipart(content_type="application/octet-stream") assert ( field.render_headers() == 'Content-Disposition: form-data; name="somename"; filename=""\r\n' "Content-Type: application/octet-stream\r\n" "\r\n" ) def test_render_parts(self) -> None: field 
= RequestField("somename", "data") parts = field._render_parts({"name": "value", "filename": "value"}) assert 'name="value"' in parts assert 'filename="value"' in parts parts = field._render_parts([("name", "value"), ("filename", "value")]) assert parts == 'name="value"; filename="value"' @pytest.mark.parametrize( ("value", "expect"), [("näme", "filename*=utf-8''n%C3%A4me"), (b"name", 'filename="name"')], ) def test_format_header_param_rfc2231_deprecated( self, value: bytes | str, expect: str ) -> None: with pytest.deprecated_call(match=r"urllib3 v2\.1\.0"): param = format_header_param_rfc2231("filename", value) assert param == expect def test_format_header_param_html5_deprecated(self) -> None: with pytest.deprecated_call(match=r"urllib3 v2\.1\.0"): param2 = format_header_param_html5("filename", "name") with pytest.deprecated_call(match=r"urllib3 v2\.1\.0"): param1 = format_header_param("filename", "name") assert param1 == param2 @pytest.mark.parametrize( ("value", "expect"), [ ("name", "name"), ("näme", "näme"), (b"n\xc3\xa4me", "näme"), ("ski ⛷.txt", "ski ⛷.txt"), ("control \x1A\x1B\x1C", "control \x1A\x1B\x1C"), ("backslash \\", "backslash \\"), ("quotes '\"", "quotes '%22"), ("newline \n\r", "newline %0A%0D"), ], ) def test_format_multipart_header_param( self, value: bytes | str, expect: str ) -> None: param = format_multipart_header_param("filename", value) assert param == f'filename="{expect}"' def test_from_tuples(self) -> None: field = RequestField.from_tuples("file", ("スキー旅行.txt", "data")) cd = field.headers["Content-Disposition"] assert cd == 'form-data; name="file"; filename="スキー旅行.txt"' def test_from_tuples_rfc2231(self) -> None: with pytest.deprecated_call(match=r"urllib3 v2\.1\.0"): field = RequestField.from_tuples( "file", ("näme", "data"), header_formatter=format_header_param_rfc2231 ) cd = field.headers["Content-Disposition"] assert cd == "form-data; name=\"file\"; filename*=utf-8''n%C3%A4me" conftest.py 0000644 00000030076 15025234504 0006751 0 
ustar 00 from __future__ import annotations import contextlib import socket import ssl import typing from pathlib import Path import hypercorn import pytest import trustme import urllib3.http2 from dummyserver.app import hypercorn_app from dummyserver.asgi_proxy import ProxyApp from dummyserver.hypercornserver import run_hypercorn_in_thread from dummyserver.socketserver import HAS_IPV6 from dummyserver.testcase import HTTPSHypercornDummyServerTestCase from urllib3.util import ssl_ from urllib3.util.url import parse_url from .tz_stub import stub_timezone_ctx def pytest_addoption(parser: pytest.Parser) -> None: parser.addoption( "--integration", action="store_true", default=False, help="run integration tests only", ) def pytest_collection_modifyitems( config: pytest.Config, items: list[pytest.Item] ) -> None: integration_mode = bool(config.getoption("--integration")) skip_integration = pytest.mark.skip( reason="skipping, need --integration option to run" ) skip_normal = pytest.mark.skip( reason="skipping non integration tests in --integration mode" ) for item in items: if "integration" in item.keywords and not integration_mode: item.add_marker(skip_integration) elif integration_mode and "integration" not in item.keywords: item.add_marker(skip_normal) class ServerConfig(typing.NamedTuple): scheme: str host: str port: int ca_certs: str @property def base_url(self) -> str: host = self.host if ":" in host: host = f"[{host}]" return f"{self.scheme}://{host}:{self.port}" def _write_cert_to_dir( cert: trustme.LeafCert, tmpdir: Path, file_prefix: str = "server" ) -> dict[str, str]: cert_path = str(tmpdir / ("%s.pem" % file_prefix)) key_path = str(tmpdir / ("%s.key" % file_prefix)) cert.private_key_pem.write_to_path(key_path) cert.cert_chain_pems[0].write_to_path(cert_path) certs = {"keyfile": key_path, "certfile": cert_path} return certs @contextlib.contextmanager def run_server_in_thread( scheme: str, host: str, tmpdir: Path, ca: trustme.CA, server_cert: trustme.LeafCert ) 
-> typing.Generator[ServerConfig, None, None]: ca_cert_path = str(tmpdir / "ca.pem") ca.cert_pem.write_to_path(ca_cert_path) server_certs = _write_cert_to_dir(server_cert, tmpdir) config = hypercorn.Config() config.certfile = server_certs["certfile"] config.keyfile = server_certs["keyfile"] config.bind = [f"{host}:0"] with run_hypercorn_in_thread(config, hypercorn_app): port = typing.cast(int, parse_url(config.bind[0]).port) yield ServerConfig(scheme, host, port, ca_cert_path) @contextlib.contextmanager def run_server_and_proxy_in_thread( proxy_scheme: str, proxy_host: str, tmpdir: Path, ca: trustme.CA, proxy_cert: trustme.LeafCert, server_cert: trustme.LeafCert, ) -> typing.Generator[tuple[ServerConfig, ServerConfig], None, None]: ca_cert_path = str(tmpdir / "ca.pem") ca.cert_pem.write_to_path(ca_cert_path) server_certs = _write_cert_to_dir(server_cert, tmpdir) proxy_certs = _write_cert_to_dir(proxy_cert, tmpdir, "proxy") with contextlib.ExitStack() as stack: server_config = hypercorn.Config() server_config.certfile = server_certs["certfile"] server_config.keyfile = server_certs["keyfile"] server_config.bind = ["localhost:0"] stack.enter_context(run_hypercorn_in_thread(server_config, hypercorn_app)) port = typing.cast(int, parse_url(server_config.bind[0]).port) proxy_config = hypercorn.Config() proxy_config.certfile = proxy_certs["certfile"] proxy_config.keyfile = proxy_certs["keyfile"] proxy_config.bind = [f"{proxy_host}:0"] stack.enter_context(run_hypercorn_in_thread(proxy_config, ProxyApp())) proxy_port = typing.cast(int, parse_url(proxy_config.bind[0]).port) yield ( ServerConfig(proxy_scheme, proxy_host, proxy_port, ca_cert_path), ServerConfig("https", "localhost", port, ca_cert_path), ) @pytest.fixture(params=["localhost", "127.0.0.1", "::1"]) def loopback_host(request: typing.Any) -> typing.Generator[str, None, None]: host = request.param if host == "::1" and not HAS_IPV6: pytest.skip("Test requires IPv6 on loopback") yield host @pytest.fixture() def 
san_server( loopback_host: str, tmp_path_factory: pytest.TempPathFactory ) -> typing.Generator[ServerConfig, None, None]: tmpdir = tmp_path_factory.mktemp("certs") ca = trustme.CA() server_cert = ca.issue_cert(loopback_host) with run_server_in_thread("https", loopback_host, tmpdir, ca, server_cert) as cfg: yield cfg @pytest.fixture() def no_san_server( loopback_host: str, tmp_path_factory: pytest.TempPathFactory ) -> typing.Generator[ServerConfig, None, None]: tmpdir = tmp_path_factory.mktemp("certs") ca = trustme.CA() server_cert = ca.issue_cert(common_name=loopback_host) with run_server_in_thread("https", loopback_host, tmpdir, ca, server_cert) as cfg: yield cfg @pytest.fixture() def no_san_server_with_different_commmon_name( tmp_path_factory: pytest.TempPathFactory, ) -> typing.Generator[ServerConfig, None, None]: tmpdir = tmp_path_factory.mktemp("certs") ca = trustme.CA() server_cert = ca.issue_cert(common_name="example.com") with run_server_in_thread("https", "localhost", tmpdir, ca, server_cert) as cfg: yield cfg @pytest.fixture def san_proxy_with_server( loopback_host: str, tmp_path_factory: pytest.TempPathFactory ) -> typing.Generator[tuple[ServerConfig, ServerConfig], None, None]: tmpdir = tmp_path_factory.mktemp("certs") ca = trustme.CA() proxy_cert = ca.issue_cert(loopback_host) server_cert = ca.issue_cert("localhost") with run_server_and_proxy_in_thread( "https", loopback_host, tmpdir, ca, proxy_cert, server_cert ) as cfg: yield cfg @pytest.fixture def no_san_proxy_with_server( tmp_path_factory: pytest.TempPathFactory, ) -> typing.Generator[tuple[ServerConfig, ServerConfig], None, None]: tmpdir = tmp_path_factory.mktemp("certs") ca = trustme.CA() # only common name, no subject alternative names proxy_cert = ca.issue_cert(common_name="localhost") server_cert = ca.issue_cert("localhost") with run_server_and_proxy_in_thread( "https", "localhost", tmpdir, ca, proxy_cert, server_cert ) as cfg: yield cfg @pytest.fixture def no_localhost_san_server( 
tmp_path_factory: pytest.TempPathFactory, ) -> typing.Generator[ServerConfig, None, None]: tmpdir = tmp_path_factory.mktemp("certs") ca = trustme.CA() # non localhost common name server_cert = ca.issue_cert("example.com") with run_server_in_thread("https", "localhost", tmpdir, ca, server_cert) as cfg: yield cfg @pytest.fixture def ipv4_san_proxy_with_server( tmp_path_factory: pytest.TempPathFactory, ) -> typing.Generator[tuple[ServerConfig, ServerConfig], None, None]: tmpdir = tmp_path_factory.mktemp("certs") ca = trustme.CA() # IP address in Subject Alternative Name proxy_cert = ca.issue_cert("127.0.0.1") server_cert = ca.issue_cert("localhost") with run_server_and_proxy_in_thread( "https", "127.0.0.1", tmpdir, ca, proxy_cert, server_cert ) as cfg: yield cfg @pytest.fixture def ipv6_san_proxy_with_server( tmp_path_factory: pytest.TempPathFactory, ) -> typing.Generator[tuple[ServerConfig, ServerConfig], None, None]: tmpdir = tmp_path_factory.mktemp("certs") ca = trustme.CA() # IP addresses in Subject Alternative Name proxy_cert = ca.issue_cert("::1") server_cert = ca.issue_cert("localhost") with run_server_and_proxy_in_thread( "https", "::1", tmpdir, ca, proxy_cert, server_cert ) as cfg: yield cfg @pytest.fixture def ipv4_san_server( tmp_path_factory: pytest.TempPathFactory, ) -> typing.Generator[ServerConfig, None, None]: tmpdir = tmp_path_factory.mktemp("certs") ca = trustme.CA() # IP address in Subject Alternative Name server_cert = ca.issue_cert("127.0.0.1") with run_server_in_thread("https", "127.0.0.1", tmpdir, ca, server_cert) as cfg: yield cfg @pytest.fixture def ipv6_san_server( tmp_path_factory: pytest.TempPathFactory, ) -> typing.Generator[ServerConfig, None, None]: if not HAS_IPV6: pytest.skip("Only runs on IPv6 systems") tmpdir = tmp_path_factory.mktemp("certs") ca = trustme.CA() # IP address in Subject Alternative Name server_cert = ca.issue_cert("::1") with run_server_in_thread("https", "::1", tmpdir, ca, server_cert) as cfg: yield cfg 
@pytest.fixture def ipv6_no_san_server( tmp_path_factory: pytest.TempPathFactory, ) -> typing.Generator[ServerConfig, None, None]: if not HAS_IPV6: pytest.skip("Only runs on IPv6 systems") tmpdir = tmp_path_factory.mktemp("certs") ca = trustme.CA() # IP address in Common Name server_cert = ca.issue_cert(common_name="::1") with run_server_in_thread("https", "::1", tmpdir, ca, server_cert) as cfg: yield cfg @pytest.fixture def stub_timezone(request: pytest.FixtureRequest) -> typing.Generator[None, None, None]: """ A pytest fixture that runs the test with a stub timezone. """ with stub_timezone_ctx(request.param): yield @pytest.fixture(scope="session") def supported_tls_versions() -> typing.AbstractSet[str | None]: # We have to create an actual TLS connection # to test if the TLS version is not disabled by # OpenSSL config. Ubuntu 20.04 specifically # disables TLSv1 and TLSv1.1. tls_versions = set() _server = HTTPSHypercornDummyServerTestCase _server.setup_class() for _ssl_version_name, min_max_version in ( ("PROTOCOL_TLSv1", ssl.TLSVersion.TLSv1), ("PROTOCOL_TLSv1_1", ssl.TLSVersion.TLSv1_1), ("PROTOCOL_TLSv1_2", ssl.TLSVersion.TLSv1_2), ("PROTOCOL_TLS", None), ): _ssl_version = getattr(ssl, _ssl_version_name, 0) if _ssl_version == 0: continue _sock = socket.create_connection((_server.host, _server.port)) try: _sock = ssl_.ssl_wrap_socket( _sock, ssl_context=ssl_.create_urllib3_context( cert_reqs=ssl.CERT_NONE, ssl_minimum_version=min_max_version, ssl_maximum_version=min_max_version, ), ) except ssl.SSLError: pass else: tls_versions.add(_sock.version()) _sock.close() _server.teardown_class() return tls_versions @pytest.fixture(scope="function") def requires_tlsv1(supported_tls_versions: typing.AbstractSet[str]) -> None: """Test requires TLSv1 available""" if not hasattr(ssl, "PROTOCOL_TLSv1") or "TLSv1" not in supported_tls_versions: pytest.skip("Test requires TLSv1") @pytest.fixture(scope="function") def requires_tlsv1_1(supported_tls_versions: 
typing.AbstractSet[str]) -> None: """Test requires TLSv1.1 available""" if not hasattr(ssl, "PROTOCOL_TLSv1_1") or "TLSv1.1" not in supported_tls_versions: pytest.skip("Test requires TLSv1.1") @pytest.fixture(scope="function") def requires_tlsv1_2(supported_tls_versions: typing.AbstractSet[str]) -> None: """Test requires TLSv1.2 available""" if not hasattr(ssl, "PROTOCOL_TLSv1_2") or "TLSv1.2" not in supported_tls_versions: pytest.skip("Test requires TLSv1.2") @pytest.fixture(scope="function") def requires_tlsv1_3(supported_tls_versions: typing.AbstractSet[str]) -> None: """Test requires TLSv1.3 available""" if ( not getattr(ssl, "HAS_TLSv1_3", False) or "TLSv1.3" not in supported_tls_versions ): pytest.skip("Test requires TLSv1.3") @pytest.fixture(params=["h11", "h2"]) def http_version(request: pytest.FixtureRequest) -> typing.Generator[str, None, None]: if request.param == "h2": urllib3.http2.inject_into_urllib3() yield request.param if request.param == "h2": urllib3.http2.extract_from_urllib3() test_no_ssl.py 0000644 00000001722 15025234504 0007454 0 ustar 00 """ Test what happens if Python was built without SSL * Everything that does not involve HTTPS should still work * HTTPS requests must fail with an error that points at the ssl module """ from __future__ import annotations import sys from test import ImportBlocker, ModuleStash import pytest ssl_blocker = ImportBlocker("ssl", "_ssl") module_stash = ModuleStash("urllib3") class TestWithoutSSL: @classmethod def setup_class(cls) -> None: sys.modules.pop("ssl", None) sys.modules.pop("_ssl", None) module_stash.stash() sys.meta_path.insert(0, ssl_blocker) @classmethod def teardown_class(cls) -> None: sys.meta_path.remove(ssl_blocker) module_stash.pop() class TestImportWithoutSSL(TestWithoutSSL): def test_cannot_import_ssl(self) -> None: with pytest.raises(ImportError): import ssl # noqa: F401 def test_import_urllib3(self) -> None: import urllib3 # noqa: F401 __init__.py 0000644 00000024161 15025234504 0006661 0 
ustar 00 from __future__ import annotations import errno import importlib.util import logging import os import platform import socket import sys import typing import warnings from collections.abc import Sequence from functools import wraps from importlib.abc import Loader, MetaPathFinder from importlib.machinery import ModuleSpec from types import ModuleType, TracebackType import pytest try: try: import brotlicffi as brotli # type: ignore[import-not-found] except ImportError: import brotli # type: ignore[import-not-found] except ImportError: brotli = None try: import zstandard as _unused_module_zstd # noqa: F401 except ImportError: HAS_ZSTD = False else: HAS_ZSTD = True from urllib3 import util from urllib3.connectionpool import ConnectionPool from urllib3.exceptions import HTTPWarning from urllib3.util import ssl_ try: import urllib3.contrib.pyopenssl as pyopenssl except ImportError: pyopenssl = None # type: ignore[assignment] if typing.TYPE_CHECKING: import ssl _RT = typing.TypeVar("_RT") # return type _TestFuncT = typing.TypeVar("_TestFuncT", bound=typing.Callable[..., typing.Any]) # We need a host that will not immediately close the connection with a TCP # Reset. if platform.system() == "Windows": # Reserved loopback subnet address TARPIT_HOST = "127.0.0.0" else: # Reserved internet scoped address # https://www.iana.org/assignments/iana-ipv4-special-registry/iana-ipv4-special-registry.xhtml TARPIT_HOST = "240.0.0.0" # (Arguments for socket, is it IPv6 address?) VALID_SOURCE_ADDRESSES = [(("::1", 0), True), (("127.0.0.1", 0), False)] # RFC 5737: 192.0.2.0/24 is for testing only. # RFC 3849: 2001:db8::/32 is for documentation only. INVALID_SOURCE_ADDRESSES = [(("192.0.2.255", 0), False), (("2001:db8::1", 0), True)] # We use timeouts in three different ways in our tests # # 1. To make sure that the operation timeouts, we can use a short timeout. # 2. 
To make sure that the test does not hang even if the operation should succeed, we # want to use a long timeout, even more so on CI where tests can be really slow # 3. To test our timeout logic by using two different values, eg. by using different # values at the pool level and at the request level. SHORT_TIMEOUT = 0.001 LONG_TIMEOUT = 0.1 if os.environ.get("CI") or os.environ.get("GITHUB_ACTIONS") == "true": LONG_TIMEOUT = 0.5 DUMMY_POOL = ConnectionPool("dummy") def _can_resolve(host: str) -> bool: """Returns True if the system can resolve host to an address.""" try: socket.getaddrinfo(host, None, socket.AF_UNSPEC) return True except socket.gaierror: return False def has_alpn(ctx_cls: type[ssl.SSLContext] | None = None) -> bool: """Detect if ALPN support is enabled.""" ctx_cls = ctx_cls or util.SSLContext ctx = ctx_cls(protocol=ssl_.PROTOCOL_TLS) # type: ignore[misc, attr-defined] try: if hasattr(ctx, "set_alpn_protocols"): ctx.set_alpn_protocols(ssl_.ALPN_PROTOCOLS) return True except NotImplementedError: pass return False # Some systems might not resolve "localhost." correctly. # See https://github.com/urllib3/urllib3/issues/1809 and # https://github.com/urllib3/urllib3/pull/1475#issuecomment-440788064. 
RESOLVES_LOCALHOST_FQDN = _can_resolve("localhost.") def clear_warnings(cls: type[Warning] = HTTPWarning) -> None: new_filters = [] for f in warnings.filters: if issubclass(f[2], cls): continue new_filters.append(f) warnings.filters[:] = new_filters # type: ignore[index] def setUp() -> None: clear_warnings() warnings.simplefilter("ignore", HTTPWarning) def notWindows() -> typing.Callable[[_TestFuncT], _TestFuncT]: """Skips this test on Windows""" return pytest.mark.skipif( platform.system() == "Windows", reason="Test does not run on Windows", ) def onlyBrotli() -> typing.Callable[[_TestFuncT], _TestFuncT]: return pytest.mark.skipif( brotli is None, reason="only run if brotli library is present" ) def notBrotli() -> typing.Callable[[_TestFuncT], _TestFuncT]: return pytest.mark.skipif( brotli is not None, reason="only run if a brotli library is absent" ) def onlyZstd() -> typing.Callable[[_TestFuncT], _TestFuncT]: return pytest.mark.skipif( not HAS_ZSTD, reason="only run if a python-zstandard library is installed" ) def notZstd() -> typing.Callable[[_TestFuncT], _TestFuncT]: return pytest.mark.skipif( HAS_ZSTD, reason="only run if a python-zstandard library is not installed", ) _requires_network_has_route = None def requires_network() -> typing.Callable[[_TestFuncT], _TestFuncT]: """Helps you skip tests that require the network""" def _is_unreachable_err(err: Exception) -> bool: return getattr(err, "errno", None) in ( errno.ENETUNREACH, errno.EHOSTUNREACH, # For OSX ) def _has_route() -> bool: try: sock = socket.create_connection((TARPIT_HOST, 80), 0.0001) sock.close() return True except socket.timeout: return True except OSError as e: if _is_unreachable_err(e): return False else: raise def _skip_if_no_route(f: _TestFuncT) -> _TestFuncT: """Skip test exuction if network is unreachable""" @wraps(f) def wrapper(*args: typing.Any, **kwargs: typing.Any) -> typing.Any: global _requires_network_has_route if _requires_network_has_route is None: _requires_network_has_route = 
_has_route() if not _requires_network_has_route: pytest.skip("Can't run the test because the network is unreachable") return f(*args, **kwargs) return typing.cast(_TestFuncT, wrapper) def _decorator_requires_internet( decorator: typing.Callable[[_TestFuncT], _TestFuncT] ) -> typing.Callable[[_TestFuncT], _TestFuncT]: """Mark a decorator with the "requires_internet" mark""" def wrapper(f: _TestFuncT) -> typing.Any: return pytest.mark.requires_network(decorator(f)) return wrapper return _decorator_requires_internet(_skip_if_no_route) def resolvesLocalhostFQDN() -> typing.Callable[[_TestFuncT], _TestFuncT]: """Test requires successful resolving of 'localhost.'""" return pytest.mark.skipif( not RESOLVES_LOCALHOST_FQDN, reason="Can't resolve localhost.", ) def withPyOpenSSL(test: typing.Callable[..., _RT]) -> typing.Callable[..., _RT]: @wraps(test) def wrapper(*args: typing.Any, **kwargs: typing.Any) -> _RT: if not pyopenssl: pytest.skip("pyopenssl not available, skipping test.") return test(*args, **kwargs) pyopenssl.inject_into_urllib3() result = test(*args, **kwargs) pyopenssl.extract_from_urllib3() return result return wrapper class _ListHandler(logging.Handler): def __init__(self) -> None: super().__init__() self.records: list[logging.LogRecord] = [] def emit(self, record: logging.LogRecord) -> None: self.records.append(record) class LogRecorder: def __init__(self, target: logging.Logger = logging.root) -> None: super().__init__() self._target = target self._handler = _ListHandler() @property def records(self) -> list[logging.LogRecord]: return self._handler.records def install(self) -> None: self._target.addHandler(self._handler) def uninstall(self) -> None: self._target.removeHandler(self._handler) def __enter__(self) -> list[logging.LogRecord]: self.install() return self.records def __exit__( self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: TracebackType | None, ) -> typing.Literal[False]: self.uninstall() return False 
class ImportBlockerLoader(Loader): def __init__(self, fullname: str) -> None: self._fullname = fullname def load_module(self, fullname: str) -> ModuleType: raise ImportError(f"import of {fullname} is blocked") def exec_module(self, module: ModuleType) -> None: raise ImportError(f"import of {self._fullname} is blocked") class ImportBlocker(MetaPathFinder): """ Block Imports To be placed on ``sys.meta_path``. This ensures that the modules specified cannot be imported, even if they are a builtin. """ def __init__(self, *namestoblock: str) -> None: self.namestoblock = namestoblock def find_module( self, fullname: str, path: typing.Sequence[bytes | str] | None = None ) -> Loader | None: if fullname in self.namestoblock: return ImportBlockerLoader(fullname) return None def find_spec( self, fullname: str, path: Sequence[bytes | str] | None, target: ModuleType | None = None, ) -> ModuleSpec | None: loader = self.find_module(fullname, path) if loader is None: return None return importlib.util.spec_from_loader(fullname, loader) class ModuleStash(MetaPathFinder): """ Stashes away previously imported modules If we reimport a module the data from coverage is lost, so we reuse the old modules """ def __init__( self, namespace: str, modules: dict[str, ModuleType] = sys.modules ) -> None: self.namespace = namespace self.modules = modules self._data: dict[str, ModuleType] = {} def stash(self) -> None: if self.namespace in self.modules: self._data[self.namespace] = self.modules.pop(self.namespace) for module in list(self.modules.keys()): if module.startswith(self.namespace + "."): self._data[module] = self.modules.pop(module) def pop(self) -> None: self.modules.pop(self.namespace, None) for module in list(self.modules.keys()): if module.startswith(self.namespace + "."): self.modules.pop(module) self.modules.update(self._data) test_crcmod.py 0000644 00000000110 15025252003 0007406 0 ustar 00 import unittest import crcmod.test unittest.main(module=crcmod.test) examples.py 0000644 
# (tar member header residue for "examples.py" preceded this content)
# -----------------------------------------------------------------------------
# Demonstrate the use of the code generator

from crcmod import Crc

# Generator polynomials.
g8 = 0x185
g16 = 0x11021
g24 = 0x15D6DCB
g32 = 0x104C11DB7


def polyFromBits(bits):
    """Return the polynomial integer with bit *n* set for every n in *bits*."""
    p = 0
    for n in bits:
        p = p | (1 << n)
    return p


# The following is from Standard ECMA-182 "Data Interchange on 12,7 mm 48-Track
# Magnetic Tape Cartridges -DLT1 Format-", December 1992.
g64 = polyFromBits([64, 62, 57, 55, 54, 53, 52, 47, 46, 45, 40, 39, 38, 37, 35,
                    33, 32, 31, 29, 27, 24, 23, 22, 21, 19, 17, 13, 12, 10, 9,
                    7, 4, 1, 0])

print('Generating examples.c')
# Context manager ensures the output file is closed even if generateCode()
# raises part-way through (the original leaked the handle on error).
with open('examples.c', 'w') as out:
    out.write('''// Define the required data types

typedef unsigned char UINT8;
typedef unsigned short UINT16;
typedef unsigned int UINT32;
typedef unsigned long long UINT64;
''')

    # Forward and bit-reversed table generators for each polynomial,
    # emitted in the same order as the original hand-written call list.
    for poly, name in ((g8, 'crc8'), (g16, 'crc16'), (g24, 'crc24'),
                       (g32, 'crc32'), (g64, 'crc64')):
        Crc(poly, rev=False).generateCode(name, out)
        Crc(poly, rev=True).generateCode(name + 'r', out)

    # Check out the XOR-out feature.
    for poly, name in ((g16, 'crc16x'), (g24, 'crc24x'),
                       (g32, 'crc32x'), (g64, 'crc64x')):
        Crc(poly, initCrc=0, rev=True, xorOut=~0).generateCode(name, out)

print('Done')
| ver. 1.4 |
Github
|
.
| PHP 8.2.28 | Generation time: 0.03 |
proxy
|
phpinfo
|
Settings