From b41712acd64dd18c4023928d57845ab7475b79cd Mon Sep 17 00:00:00 2001 From: Anuraag Agrawal Date: Tue, 20 Jan 2026 19:55:50 +0900 Subject: [PATCH] Migrate to pyqwest Signed-off-by: Anuraag Agrawal --- README.md | 26 +-- conformance/test/client.py | 99 +++------ conformance/test/test_client.py | 44 +--- noextras/test/test_compression_default.py | 15 +- pyproject.toml | 8 +- src/connectrpc/_client_async.py | 142 +++++------- src/connectrpc/_client_shared.py | 44 ++-- src/connectrpc/_client_sync.py | 87 ++++---- src/connectrpc/_envelope.py | 6 +- src/connectrpc/_protocol.py | 13 +- src/connectrpc/_protocol_connect.py | 4 +- src/connectrpc/_protocol_grpc.py | 15 +- src/connectrpc/_response_metadata.py | 17 +- test/test_client.py | 9 +- test/test_details.py | 9 +- test/test_errors.py | 259 +++++++++++----------- test/test_example.py | 2 - test/test_interceptor.py | 11 +- test/test_lifespan.py | 98 ++------ test/test_roundtrip.py | 21 +- uv.lock | 105 ++++----- 21 files changed, 426 insertions(+), 608 deletions(-) diff --git a/README.md b/README.md index 5f4298f..5c5ba42 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,7 @@ This repo provides a Python implementation of Connect, including both client and ## Features -- **Clients**: Both synchronous and asynchronous clients backed by [httpx](https://www.python-httpx.org/) +- **Clients**: Both synchronous and asynchronous clients backed by [pyqwest](https://pyqwest.dev/) - **Servers**: WSGI and ASGI server implementations for use with any Python app server - **Type Safety**: Fully type-annotated, including the generated code - **Compression**: Built-in support for gzip, brotli, and zstd compression @@ -63,8 +63,8 @@ it can be referenced as `protoc-gen-connect-python`. Then, you can use `protoc-gen-connect-python` as a local plugin: ```yaml - - local: .venv/bin/protoc-gen-connect-python - out: . +- local: .venv/bin/protoc-gen-connect-python + out: . ``` Alternatively, download a precompiled binary from the @@ -79,18 +79,14 @@ For more usage details, see the [docs](./docs/usage.md). ### Basic Client Usage ```python -import httpx from your_service_pb2 import HelloRequest, HelloResponse from your_service_connect import HelloServiceClient # Create async client async def main(): - async with httpx.AsyncClient() as session: - client = HelloServiceClient( - base_url="https://api.example.com", - session=session - ) - + async with HelloServiceClient( + base_url="https://api.example.com", + ) as client: # Make a unary RPC call response = await client.say_hello(HelloRequest(name="World")) print(response.message) # "Hello, World!" @@ -117,18 +113,14 @@ app = HelloServiceASGIApplication(MyHelloService()) ### Basic Client Usage (Synchronous) ```python -import httpx from your_service_pb2 import HelloRequest from your_service_connect import HelloServiceClientSync # Create sync client def main(): - with httpx.Client() as session: - client = HelloServiceClientSync( - base_url="https://api.example.com", - session=session - ) - + with HelloServiceClientSync( + base_url="https://api.example.com", + ) as client: # Make a unary RPC call response = client.say_hello(HelloRequest(name="World")) print(response.message) # "Hello, World!" 
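Callers that previously passed `session=httpx.AsyncClient(...)` can still supply their own HTTP client through the new `http_client` parameter introduced by this patch. A minimal sketch in the style of the README examples above — the bare `Client()` stands in for whatever pyqwest configuration (transport, HTTP version, TLS) an application actually needs:

```python
from pyqwest import Client
from your_service_pb2 import HelloRequest
from your_service_connect import HelloServiceClient

# Share one preconfigured pyqwest Client across Connect clients
async def main():
    async with HelloServiceClient(
        base_url="https://api.example.com",
        http_client=Client(),
    ) as client:
        response = await client.say_hello(HelloRequest(name="World"))
        print(response.message)  # "Hello, World!"
```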
diff --git a/conformance/test/client.py b/conformance/test/client.py index 535b628..ecabb30 100644 --- a/conformance/test/client.py +++ b/conformance/test/client.py @@ -5,14 +5,11 @@ import contextlib import multiprocessing import queue -import ssl import sys import time import traceback -from tempfile import NamedTemporaryFile from typing import TYPE_CHECKING, Literal, TypeVar, get_args -import httpx from _util import create_standard_streams from gen.connectrpc.conformance.v1.client_compat_pb2 import ( ClientCompatRequest, @@ -40,9 +37,8 @@ UnimplementedRequest, ) from google.protobuf.message import Message -from pyqwest import HTTPTransport, SyncHTTPTransport +from pyqwest import Client, HTTPTransport, SyncClient, SyncHTTPTransport from pyqwest import HTTPVersion as PyQwestHTTPVersion -from pyqwest.httpx import AsyncPyqwestTransport, PyqwestTransport from connectrpc.client import ResponseMetadata from connectrpc.code import Code @@ -118,41 +114,8 @@ def _unpack_request(message: Any, request: T) -> T: return request -async def httpx_client_kwargs(test_request: ClientCompatRequest) -> dict: - kwargs = {} - match test_request.http_version: - case HTTPVersion.HTTP_VERSION_1: - kwargs["http1"] = True - kwargs["http2"] = False - case HTTPVersion.HTTP_VERSION_2: - kwargs["http1"] = False - kwargs["http2"] = True - if test_request.server_tls_cert: - ctx = ssl.create_default_context( - purpose=ssl.Purpose.SERVER_AUTH, - cadata=test_request.server_tls_cert.decode(), - ) - if test_request.HasField("client_tls_creds"): - - def load_certs() -> None: - with ( - NamedTemporaryFile() as cert_file, - NamedTemporaryFile() as key_file, - ): - cert_file.write(test_request.client_tls_creds.cert) - cert_file.flush() - key_file.write(test_request.client_tls_creds.key) - key_file.flush() - ctx.load_cert_chain(certfile=cert_file.name, keyfile=key_file.name) - - await asyncio.to_thread(load_certs) - kwargs["verify"] = ctx - - return kwargs - - def pyqwest_client_kwargs(test_request: ClientCompatRequest) -> dict: - kwargs: dict = {"enable_gzip": True, "enable_brotli": True, "enable_zstd": True} + kwargs: dict = {} match test_request.http_version: case HTTPVersion.HTTP_VERSION_1: kwargs["http_version"] = PyQwestHTTPVersion.HTTP1 @@ -169,28 +132,26 @@ def pyqwest_client_kwargs(test_request: ClientCompatRequest) -> dict: @contextlib.asynccontextmanager async def client_sync( - test_request: ClientCompatRequest, client_type: Client + test_request: ClientCompatRequest, ) -> AsyncIterator[ConformanceServiceClientSync]: read_max_bytes = None if test_request.message_receive_limit: read_max_bytes = test_request.message_receive_limit scheme = "https" if test_request.server_tls_cert else "http" + args = pyqwest_client_kwargs(test_request) + cleanup = contextlib.ExitStack() - match client_type: - case "httpx": - args = await httpx_client_kwargs(test_request) - session = cleanup.enter_context(httpx.Client(**args)) - case "pyqwest": - args = pyqwest_client_kwargs(test_request) - http_transport = cleanup.enter_context(SyncHTTPTransport(**args)) - transport = cleanup.enter_context(PyqwestTransport(http_transport)) - session = cleanup.enter_context(httpx.Client(transport=transport)) + if args: + transport = cleanup.enter_context(SyncHTTPTransport(**args)) + http_client = SyncClient(transport) + else: + http_client = None with ( cleanup, ConformanceServiceClientSync( f"{scheme}://{test_request.host}:{test_request.port}", - session=session, + http_client=http_client, send_compression=_convert_compression(test_request.compression), 
proto_json=test_request.codec == Codec.CODEC_JSON, grpc=test_request.protocol == Protocol.PROTOCOL_GRPC, @@ -202,32 +163,29 @@ async def client_sync( @contextlib.asynccontextmanager async def client_async( - test_request: ClientCompatRequest, client_type: Client + test_request: ClientCompatRequest, ) -> AsyncIterator[ConformanceServiceClient]: read_max_bytes = None if test_request.message_receive_limit: read_max_bytes = test_request.message_receive_limit scheme = "https" if test_request.server_tls_cert else "http" + args = pyqwest_client_kwargs(test_request) + cleanup = contextlib.AsyncExitStack() - match client_type: - case "httpx": - args = await httpx_client_kwargs(test_request) - session = await cleanup.enter_async_context(httpx.AsyncClient(**args)) - case "pyqwest": - args = pyqwest_client_kwargs(test_request) - http_transport = await cleanup.enter_async_context(HTTPTransport(**args)) - transport = await cleanup.enter_async_context( - AsyncPyqwestTransport(http_transport) - ) - session = await cleanup.enter_async_context( - httpx.AsyncClient(transport=transport) - ) + if args: + transport = HTTPTransport(**args) + # Type parameter for enter_async_context requires coroutine even though + # implementation doesn't. We can directly push aexit to work around it. + cleanup.push_async_exit(transport.__aexit__) + http_client = Client(transport) + else: + http_client = None async with ( cleanup, ConformanceServiceClient( f"{scheme}://{test_request.host}:{test_request.port}", - session=session, + http_client=http_client, send_compression=_convert_compression(test_request.compression), proto_json=test_request.codec == Codec.CODEC_JSON, grpc=test_request.protocol == Protocol.PROTOCOL_GRPC, @@ -238,7 +196,7 @@ async def client_async( async def _run_test( - mode: Mode, test_request: ClientCompatRequest, client_type: Client + mode: Mode, test_request: ClientCompatRequest ) -> ClientCompatResponse: test_response = ClientCompatResponse() test_response.test_name = test_request.test_name @@ -260,7 +218,7 @@ async def _run_test( request_closed = asyncio.Event() match mode: case "sync": - async with client_sync(test_request, client_type) as client: + async with client_sync(test_request) as client: match test_request.method: case "BidiStream": request_queue = queue.Queue() @@ -468,7 +426,7 @@ def send_unary_request_sync( task.cancel() await task case "async": - async with client_async(test_request, client_type) as client: + async with client_async(test_request) as client: match test_request.method: case "BidiStream": request_queue = asyncio.Queue() @@ -691,12 +649,10 @@ async def send_unary_request( Mode = Literal["sync", "async"] -Client = Literal["httpx", "pyqwest"] class Args(argparse.Namespace): mode: Mode - client: Client parallel: int @@ -704,7 +660,6 @@ async def main() -> None: parser = argparse.ArgumentParser(description="Conformance client") parser.add_argument("--mode", choices=get_args(Mode)) parser.add_argument("--parallel", type=int, default=multiprocessing.cpu_count() * 4) - parser.add_argument("--client", choices=get_args(Client)) args = parser.parse_args(namespace=Args()) stdin, stdout = await create_standard_streams() @@ -724,7 +679,7 @@ async def main() -> None: async def task(request: ClientCompatRequest) -> None: async with sema: - response = await _run_test(args.mode, request, args.client) + response = await _run_test(args.mode, request) response_buf = response.SerializeToString() size_buf = len(response_buf).to_bytes(4, byteorder="big") diff --git a/conformance/test/test_client.py 
b/conformance/test/test_client.py index bbafad2..98f040a 100644 --- a/conformance/test/test_client.py +++ b/conformance/test/test_client.py @@ -18,40 +18,12 @@ "Client Cancellation/**", ] -_httpx_opts = [ - # Trailers not supported - "--skip", - "**/Protocol:PROTOCOL_GRPC/**", - "--skip", - "gRPC Trailers/**", - "--skip", - "gRPC Unexpected Responses/**", - "--skip", - "gRPC Empty Responses/**", - "--skip", - "gRPC Proto Sub-Format Responses/**", - # Bidirectional streaming not supported - "--skip", - "**/full-duplex/**", - # Cancellation delivery isn't reliable - "--known-flaky", - "Client Cancellation/**", - "--known-flaky", - "Timeouts/**", -] - -@pytest.mark.parametrize("client", ["httpx", "pyqwest"]) -def test_client_sync(client: str) -> None: +def test_client_sync() -> None: args = maybe_patch_args_with_debug( - [sys.executable, _client_py_path, "--mode", "sync", "--client", client] + [sys.executable, _client_py_path, "--mode", "sync"] ) - opts = [] - match client: - case "httpx": - opts = _httpx_opts - result = subprocess.run( [ "go", @@ -61,7 +33,6 @@ def test_client_sync(client: str) -> None: _config_path, "--mode", "client", - *opts, *_skipped_tests_sync, "--", *args, @@ -74,17 +45,11 @@ def test_client_sync(client: str) -> None: pytest.fail(f"\n{result.stdout}\n{result.stderr}") -@pytest.mark.parametrize("client", ["httpx", "pyqwest"]) -def test_client_async(client: str) -> None: +def test_client_async() -> None: args = maybe_patch_args_with_debug( - [sys.executable, _client_py_path, "--mode", "async", "--client", client] + [sys.executable, _client_py_path, "--mode", "async"] ) - opts = [] - match client: - case "httpx": - opts = _httpx_opts - result = subprocess.run( [ "go", @@ -94,7 +59,6 @@ def test_client_async(client: str) -> None: _config_path, "--mode", "client", - *opts, "--", *args, ], diff --git a/noextras/test/test_compression_default.py b/noextras/test/test_compression_default.py index d94d4ac..416ef3d 100644 --- a/noextras/test/test_compression_default.py +++ b/noextras/test/test_compression_default.py @@ -11,7 +11,8 @@ ElizaServiceWSGIApplication, ) from example.eliza_pb2 import SayRequest, SayResponse -from httpx import ASGITransport, AsyncClient, Client, WSGITransport +from pyqwest import Client, SyncClient +from pyqwest.testing import ASGITransport, WSGITransport @pytest.mark.parametrize("compression", ["gzip", "identity", None]) @@ -23,7 +24,7 @@ def say(self, request, ctx): app = ElizaServiceWSGIApplication(RoundtripElizaServiceSync()) with ElizaServiceClientSync( "http://localhost", - session=Client(transport=WSGITransport(app=app)), + http_client=SyncClient(WSGITransport(app=app)), send_compression=compression, accept_compression=[compression] if compression else None, ) as client: @@ -39,10 +40,10 @@ async def say(self, request, ctx): return SayResponse(sentence=request.sentence) app = ElizaServiceASGIApplication(DetailsElizaService()) - transport = ASGITransport(app) # pyright:ignore[reportArgumentType] - httpx type is not complete + transport = ASGITransport(app) async with ElizaServiceClient( "http://localhost", - session=AsyncClient(transport=transport), + http_client=Client(transport), send_compression=compression, accept_compression=[compression] if compression else None, ) as client: @@ -63,7 +64,7 @@ def say(self, request, ctx): ) as exc_info: ElizaServiceClientSync( "http://localhost", - session=Client(transport=WSGITransport(app=app)), + http_client=SyncClient(WSGITransport(app=app)), send_compression=compression, accept_compression=[compression] 
if compression else None, ) @@ -81,13 +82,13 @@ async def say(self, request, ctx): return SayResponse(sentence=request.sentence) app = ElizaServiceASGIApplication(DetailsElizaService()) - transport = ASGITransport(app) # pyright:ignore[reportArgumentType] - httpx type is not complete + transport = ASGITransport(app) with pytest.raises( ValueError, match=r"Unsupported compression method: .*" ) as exc_info: ElizaServiceClient( "http://localhost", - session=AsyncClient(transport=transport), + http_client=Client(transport), send_compression=compression, accept_compression=[compression] if compression else None, ) diff --git a/pyproject.toml b/pyproject.toml index 6cb4b0f..09e3f11 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ maintainers = [ { name = "Yasushi Itoh", email = "i2y.may.roku@gmail.com" }, ] requires-python = ">= 3.10" -dependencies = ["httpx>=0.28.1", "protobuf>=5.28"] +dependencies = ["protobuf>=5.28", "pyqwest>=0.3.2"] readme = "README.md" license = "Apache-2.0" keywords = ["rpc", "grpc", "connect", "protobuf", "http"] @@ -41,13 +41,11 @@ dev = [ "bump-my-version==1.2.4", "connect-python-example", "daphne==4.2.1", - "httpx[http2]==0.28.1", "hypercorn==0.17.3", "granian==2.5.7", "grpcio-tools==1.76.0", "gunicorn[gevent]==23.0.0", "just-bin==1.42.4; sys_platform != 'win32'", - "pyqwest==0.1.0", "pyright[nodejs]==1.1.405", "pyvoy==0.2.0", "ruff~=0.13.2", @@ -83,7 +81,7 @@ module-name = "connectrpc" [tool.pytest] strict = true testpaths = ["test"] -timeout = "1800" # 30 min +timeout = "1800" # 30 min [tool.ruff.format] skip-magic-trailing-comma = true @@ -120,7 +118,7 @@ extend-select = [ "RET", # This rule is a bit strict since accessing private members within this package can be # useful to reduce public API exposure. But it is important to not access private members - # of dependencies like httpx so we enable it and ignore the locations where we need to. + # of external dependencies so we enable it and ignore the locations where we need to. "SLF", "SIM", "SLOT", diff --git a/src/connectrpc/_client_async.py b/src/connectrpc/_client_async.py index c3f8bc8..db496db 100644 --- a/src/connectrpc/_client_async.py +++ b/src/connectrpc/_client_async.py @@ -4,9 +4,11 @@ import functools from asyncio import CancelledError, sleep, wait_for from typing import TYPE_CHECKING, Any, Protocol, TypeVar +from urllib.parse import urlencode -import httpx -from httpx import USE_CLIENT_DEFAULT, Timeout +from pyqwest import Client as HTTPClient +from pyqwest import FullResponse, Response +from pyqwest import Headers as HTTPHeaders from . import _client_shared from ._asyncio_timeout import timeout as asyncio_timeout @@ -95,7 +97,7 @@ def __init__( timeout_ms: int | None = None, read_max_bytes: int | None = None, interceptors: Iterable[Interceptor] = (), - session: httpx.AsyncClient | None = None, + http_client: HTTPClient | None = None, ) -> None: """Creates a new asynchronous Connect client. 
@@ -107,7 +109,7 @@ def __init__( timeout_ms: The timeout for requests in milliseconds read_max_bytes: The maximum number of bytes to read from the response interceptors: A list of interceptors to apply to requests - session: An httpx Client to use for requests + http_client: A pyqwest Client to use for requests """ self._address = address self._codec = get_proto_json_codec() if proto_json else get_proto_binary_codec() @@ -117,14 +119,11 @@ def __init__( ) self._timeout_ms = timeout_ms self._read_max_bytes = read_max_bytes - if session: - self._session = session - self._close_client = False + if http_client: + self._http_client = http_client else: - self._session = httpx.AsyncClient( - timeout=_convert_connect_timeout(timeout_ms) - ) - self._close_client = True + # Use shared default transport if not specified + self._http_client = HTTPClient() self._closed = False if grpc: @@ -173,8 +172,6 @@ async def close(self) -> None: """Close the HTTP client. After closing, the client cannot be used to make requests.""" if not self._closed: self._closed = True - if self._close_client: - await self._session.aclose() async def __aenter__(self) -> Self: return self @@ -276,14 +273,12 @@ async def _send_request_unary( self._send_request_bidi_stream(_yield_single_message(request), ctx) ) - request_headers = httpx.Headers(list(ctx.request_headers().allitems())) + request_headers = HTTPHeaders(ctx.request_headers().allitems()) url = f"{self._address}/{ctx.method().service_name}/{ctx.method().name}" if (timeout_ms := ctx.timeout_ms()) is not None: timeout_s = timeout_ms / 1000.0 - timeout = _convert_connect_timeout(timeout_ms) else: timeout_s = None - timeout = USE_CLIENT_DEFAULT try: request_data = self._codec.encode(request) @@ -294,35 +289,29 @@ async def _send_request_unary( params = _client_shared.prepare_get_params( self._codec, request_data, request_headers ) + params_str = urlencode(params) + url = f"{url}?{params_str}" request_headers.pop("content-type", None) resp = await wait_for( - self._session.get( - url=url, headers=request_headers, params=params, timeout=timeout - ), - timeout_s, + self._http_client.get(url=url, headers=request_headers), timeout_s ) else: resp = await wait_for( - self._session.post( - url=url, - headers=request_headers, - content=request_data, - timeout=timeout, + self._http_client.post( + url=url, headers=request_headers, content=request_data ), timeout_s, ) self._protocol.validate_response( - self._codec.name(), - resp.status_code, - resp.headers.get("content-type", ""), + self._codec.name(), resp.status, resp.headers.get("content-type", "") ) - # Decompression itself is handled by httpx, but we validate it + # Decompression itself is handled by pyqwest, but we validate it # by resolving it. 
self._protocol.handle_response_compression(resp.headers, stream=False) handle_response_headers(resp.headers) - if resp.status_code == 200: + if resp.status == 200: if ( self._read_max_bytes is not None and len(resp.content) > self._read_max_bytes @@ -336,7 +325,7 @@ async def _send_request_unary( self._codec.decode(resp.content, response) return response raise ConnectWireError.from_response(resp).to_exception() - except (httpx.TimeoutException, TimeoutError, asyncio.TimeoutError) as e: + except (TimeoutError, asyncio.TimeoutError) as e: raise ConnectError(Code.DEADLINE_EXCEEDED, "Request timed out") from e except ConnectError: raise @@ -360,58 +349,59 @@ def _send_request_server_stream( async def _send_request_bidi_stream( self, request: AsyncIterator[REQ], ctx: RequestContext[REQ, RES] ) -> AsyncIterator[RES]: - request_headers = httpx.Headers(list(ctx.request_headers().allitems())) + request_headers = HTTPHeaders(ctx.request_headers().allitems()) url = f"{self._address}/{ctx.method().service_name}/{ctx.method().name}" if (timeout_ms := ctx.timeout_ms()) is not None: timeout_s = timeout_ms / 1000.0 - timeout = _convert_connect_timeout(timeout_ms) else: timeout_s = None - timeout = USE_CLIENT_DEFAULT reader: EnvelopeReader | None = None - resp: httpx.Response | None = None + resp: Response | None = None try: request_data = _streaming_request_content( request, self._codec, self._send_compression ) - async with asyncio_timeout(timeout_s): - httpx_req = self._session.build_request( - method="POST", - url=url, - headers=request_headers, - content=request_data, - timeout=timeout, - ) - resp = await self._session.send(httpx_req, stream=True) - try: - handle_response_headers(resp.headers) - if resp.status_code == 200: - self._protocol.validate_stream_response( - self._codec.name(), resp.headers.get("content-type", "") - ) - compression = self._protocol.handle_response_compression( - resp.headers, stream=True - ) - reader = self._protocol.create_envelope_reader( - ctx.method().output, - self._codec, - compression, - self._read_max_bytes, - ) - async for chunk in resp.aiter_bytes(): - for message in reader.feed(chunk): - yield message - # Check for cancellation each message. While this seems heavyweight, - # conformance tests require it. - await sleep(0) - reader.handle_response_complete(resp) - else: - raise ConnectWireError.from_response(resp).to_exception() - finally: - await asyncio.shield(resp.aclose()) - except (httpx.TimeoutException, TimeoutError, asyncio.TimeoutError) as e: + async with ( + asyncio_timeout(timeout_s), + self._http_client.stream( + "POST", url, headers=request_headers, content=request_data + ) as resp, + ): + handle_response_headers(resp.headers) + if resp.status == 200: + self._protocol.validate_stream_response( + self._codec.name(), resp.headers.get("content-type", "") + ) + compression = self._protocol.handle_response_compression( + resp.headers, stream=True + ) + reader = self._protocol.create_envelope_reader( + ctx.method().output, + self._codec, + compression, + self._read_max_bytes, + ) + async for chunk in resp.content: + for message in reader.feed(bytes(chunk)): + yield message + # Check for cancellation each message. While this seems heavyweight, + # conformance tests require it. 
+ await sleep(0) + reader.handle_response_complete(resp) + else: + content = bytearray() + async for chunk in resp.content: + content.extend(chunk) + fres = FullResponse( + status=resp.status, + headers=resp.headers, + content=bytes(content), + trailers=resp.trailers, + ) + raise ConnectWireError.from_response(fres).to_exception() + except (TimeoutError, asyncio.TimeoutError) as e: raise ConnectError(Code.DEADLINE_EXCEEDED, "Request timed out") from e except ConnectError: raise @@ -427,16 +417,6 @@ async def _send_request_bidi_stream( raise ConnectError(Code.UNAVAILABLE, str(e)) from e -def _convert_connect_timeout(timeout_ms: float | None) -> Timeout: - if timeout_ms is None: - # If no timeout provided, match connect-go's default behavior of a 30s connect timeout - # and no read/write timeouts. - return Timeout(None, connect=30.0) - # We apply the timeout to the entire operation per connect's semantics so don't need - # HTTP timeout - return Timeout(None) - - async def _streaming_request_content( msgs: AsyncIterator[Any], codec: Codec, compression: Compression | None ) -> AsyncIterator[bytes]: diff --git a/src/connectrpc/_client_shared.py b/src/connectrpc/_client_shared.py index 474150b..fe91b9f 100644 --- a/src/connectrpc/_client_shared.py +++ b/src/connectrpc/_client_shared.py @@ -5,7 +5,8 @@ from http import HTTPStatus from typing import TYPE_CHECKING, TypeVar -from httpx import RemoteProtocolError +from pyqwest import Headers as HTTPHeaders +from pyqwest import StreamError, StreamErrorCode from . import _compression from ._codec import CODEC_NAME_JSON, CODEC_NAME_JSON_CHARSET_UTF8, Codec @@ -21,8 +22,6 @@ from .errors import ConnectError if TYPE_CHECKING: - from httpx import Headers as HttpxHeaders - from .request import RequestContext @@ -44,7 +43,7 @@ def resolve_send_compression(compression_name: str | None) -> Compression | None def prepare_get_params( - codec: Codec, request_data: bytes, headers: HttpxHeaders + codec: Codec, request_data: bytes, headers: HTTPHeaders ) -> dict[str, str]: params = {"connect": f"v{CONNECT_PROTOCOL_VERSION}"} if request_data: @@ -138,42 +137,33 @@ def validate_stream_response_content_type( def maybe_map_stream_reset( e: Exception, ctx: RequestContext[REQ, RES] ) -> ConnectError | None: - if not isinstance(e, RemoteProtocolError): + if not isinstance(e, StreamError): return None msg = str(e) - # HTTPX serializes httpcore exceptions to string unfortunately - # https://github.com/encode/httpx/blob/ae1b9f66238f75ced3ced5e4485408435de10768/httpx/_transports/default.py#L117 - match = _stream_error_code_regex.match(msg) - if not match: - return None - - # don't need when httpx without h2 is installed - from h2.errors import ErrorCodes # noqa: PLC0415 - - match int(match.group(1)): + match e.code: case ( - ErrorCodes.NO_ERROR - | ErrorCodes.PROTOCOL_ERROR - | ErrorCodes.INTERNAL_ERROR - | ErrorCodes.FLOW_CONTROL_ERROR - | ErrorCodes.SETTINGS_TIMEOUT - | ErrorCodes.FRAME_SIZE_ERROR - | ErrorCodes.COMPRESSION_ERROR - | ErrorCodes.CONNECT_ERROR + StreamErrorCode.NO_ERROR + | StreamErrorCode.PROTOCOL_ERROR + | StreamErrorCode.INTERNAL_ERROR + | StreamErrorCode.FLOW_CONTROL_ERROR + | StreamErrorCode.SETTINGS_TIMEOUT + | StreamErrorCode.FRAME_SIZE_ERROR + | StreamErrorCode.COMPRESSION_ERROR + | StreamErrorCode.CONNECT_ERROR ): return ConnectError(Code.INTERNAL, msg) - case ErrorCodes.REFUSED_STREAM: + case StreamErrorCode.REFUSED_STREAM: return ConnectError(Code.UNAVAILABLE, msg) - case ErrorCodes.CANCEL: + case StreamErrorCode.CANCEL: # Some servers use CANCEL 
when deadline expires. We can't differentiate # that from normal cancel without checking our own deadline. if (t := ctx.timeout_ms()) is not None and t <= 0: return ConnectError(Code.DEADLINE_EXCEEDED, msg) return ConnectError(Code.CANCELED, msg) - case ErrorCodes.ENHANCE_YOUR_CALM: + case StreamErrorCode.ENHANCE_YOUR_CALM: return ConnectError(Code.RESOURCE_EXHAUSTED, f"Bandwidth exhausted: {msg}") - case ErrorCodes.INADEQUATE_SECURITY: + case StreamErrorCode.INADEQUATE_SECURITY: return ConnectError( Code.PERMISSION_DENIED, f"Transport protocol insecure: {msg}" ) diff --git a/src/connectrpc/_client_sync.py b/src/connectrpc/_client_sync.py index 30b46fd..decdccd 100644 --- a/src/connectrpc/_client_sync.py +++ b/src/connectrpc/_client_sync.py @@ -2,9 +2,10 @@ import functools from typing import TYPE_CHECKING, Any, Protocol, TypeVar +from urllib.parse import urlencode -import httpx -from httpx import USE_CLIENT_DEFAULT, Timeout +from pyqwest import FullResponse, SyncClient, SyncResponse +from pyqwest import Headers as HTTPHeaders from connectrpc._protocol_grpc import GRPCClientProtocol @@ -86,7 +87,7 @@ def __init__( timeout_ms: int | None = None, read_max_bytes: int | None = None, interceptors: Iterable[InterceptorSync] = (), - session: httpx.Client | None = None, + http_client: SyncClient | None = None, ) -> None: """Creates a new synchronous Connect client. @@ -98,7 +99,7 @@ def __init__( timeout_ms: The timeout for requests in milliseconds read_max_bytes: The maximum number of bytes to read from the response interceptors: A list of interceptors to apply to requests - session: An httpx Client to use for requests + http_client: A pyqwest SyncClient to use for requests """ self._address = address self._codec = get_proto_json_codec() if proto_json else get_proto_binary_codec() @@ -108,12 +109,11 @@ def __init__( self._send_compression = _client_shared.resolve_send_compression( send_compression ) - if session: - self._session = session - self._close_client = False + if http_client: + self._http_client = http_client else: - self._session = httpx.Client(timeout=_convert_connect_timeout(timeout_ms)) - self._close_client = True + # Use shared default transport if not specified + self._http_client = SyncClient() self._closed = False if grpc: @@ -168,8 +168,6 @@ def close(self) -> None: """Close the HTTP client. 
After closing, the client cannot be used to make requests.""" if not self._closed: self._closed = True - if self._close_client: - self._session.close() def __enter__(self) -> Self: return self @@ -269,12 +267,12 @@ def _send_request_unary(self, request: REQ, ctx: RequestContext[REQ, RES]) -> RE self._send_request_bidi_stream(iter([request]), ctx) ) - request_headers = httpx.Headers(list(ctx.request_headers().allitems())) + request_headers = HTTPHeaders(ctx.request_headers().allitems()) url = f"{self._address}/{ctx.method().service_name}/{ctx.method().name}" if (timeout_ms := ctx.timeout_ms()) is not None: - timeout = _convert_connect_timeout(timeout_ms) + timeout_s = timeout_ms / 1000.0 else: - timeout = USE_CLIENT_DEFAULT + timeout_s = None try: request_data = self._codec.encode(request) @@ -285,29 +283,29 @@ def _send_request_unary(self, request: REQ, ctx: RequestContext[REQ, RES]) -> RE params = _client_shared.prepare_get_params( self._codec, request_data, request_headers ) + params_str = urlencode(params) + url = f"{url}?{params_str}" request_headers.pop("content-type", None) - resp = self._session.get( - url=url, headers=request_headers, params=params, timeout=timeout + resp = self._http_client.get( + url=url, headers=request_headers, timeout=timeout_s ) else: - resp = self._session.post( + resp = self._http_client.post( url=url, headers=request_headers, content=request_data, - timeout=timeout, + timeout=timeout_s, ) self._protocol.validate_response( - self._codec.name(), - resp.status_code, - resp.headers.get("content-type", ""), + self._codec.name(), resp.status, resp.headers.get("content-type", "") ) - # Decompression itself is handled by httpx, but we validate it + # Decompression itself is handled by pyqwest, but we validate it # by resolving it. 
self._protocol.handle_response_compression(resp.headers, stream=False) handle_response_headers(resp.headers) - if resp.status_code == 200: + if resp.status == 200: if ( self._read_max_bytes is not None and len(resp.content) > self._read_max_bytes @@ -321,7 +319,7 @@ def _send_request_unary(self, request: REQ, ctx: RequestContext[REQ, RES]) -> RE self._codec.decode(resp.content, response) return response raise ConnectWireError.from_response(resp).to_exception() - except (httpx.TimeoutException, TimeoutError) as e: + except TimeoutError as e: raise ConnectError(Code.DEADLINE_EXCEEDED, "Request timed out") from e except ConnectError: raise @@ -341,31 +339,31 @@ def _send_request_server_stream( def _send_request_bidi_stream( self, request: Iterator[REQ], ctx: RequestContext[REQ, RES] ) -> Iterator[RES]: - request_headers = httpx.Headers(list(ctx.request_headers().allitems())) + request_headers = HTTPHeaders(ctx.request_headers().allitems()) url = f"{self._address}/{ctx.method().service_name}/{ctx.method().name}" if (timeout_ms := ctx.timeout_ms()) is not None: - timeout = _convert_connect_timeout(timeout_ms) + timeout_s = timeout_ms / 1000.0 else: - timeout = USE_CLIENT_DEFAULT + timeout_s = None stream_error: Exception | None = None reader: EnvelopeReader | None = None - resp: httpx.Response | None = None + resp: SyncResponse | None = None try: request_data = _streaming_request_content( request, self._codec, self._send_compression ) - with self._session.stream( + with self._http_client.stream( method="POST", url=url, headers=request_headers, content=request_data, - timeout=timeout, + timeout=timeout_s, ) as resp: handle_response_headers(resp.headers) - if resp.status_code == 200: + if resp.status == 200: self._protocol.validate_stream_response( self._codec.name(), resp.headers.get("content-type", "") ) @@ -379,7 +377,7 @@ def _send_request_bidi_stream( self._read_max_bytes, ) try: - for chunk in resp.iter_bytes(): + for chunk in resp.content: yield from reader.feed(chunk) except ConnectError as e: stream_error = e @@ -391,10 +389,20 @@ def _send_request_bidi_stream( # https://github.com/hyperium/hyper/issues/3681#issuecomment-3734084436 if (t := ctx.timeout_ms()) is not None and t <= 0: raise TimeoutError + reader.handle_response_complete(resp) else: - raise ConnectWireError.from_response(resp).to_exception() - except (httpx.TimeoutException, TimeoutError) as e: + content = bytearray() + for chunk in resp.content: + content.extend(chunk) + fres = FullResponse( + status=resp.status, + headers=resp.headers, + content=bytes(content), + trailers=resp.trailers, + ) + raise ConnectWireError.from_response(fres).to_exception() + except TimeoutError as e: raise ConnectError(Code.DEADLINE_EXCEEDED, "Request timed out") from e except ConnectError: raise @@ -415,17 +423,6 @@ def _send_request_bidi_stream( raise ConnectError(Code.UNAVAILABLE, str(e)) from e -# Convert a timeout with connect semantics to a httpx.Timeout. Connect timeouts -# should apply to an entire operation but this is difficult in synchronous Python code -# to do cross-platform. For now, we just apply the timeout to all httpx timeouts -# if provided, or default to no read/write timeouts but with a connect timeout if -# not provided to match connect-go behavior as closely as possible. 
-def _convert_connect_timeout(timeout_ms: float | None) -> Timeout: - if timeout_ms is None: - return Timeout(None, connect=30.0) - return Timeout(timeout_ms / 1000.0) - - def _streaming_request_content( msgs: Iterator[Any], codec: Codec, compression: Compression | None ) -> Iterator[bytes]: diff --git a/src/connectrpc/_envelope.py b/src/connectrpc/_envelope.py index d238aa0..71dfe12 100644 --- a/src/connectrpc/_envelope.py +++ b/src/connectrpc/_envelope.py @@ -11,7 +11,7 @@ if TYPE_CHECKING: from collections.abc import Iterator - import httpx + from pyqwest import Response, SyncResponse from ._codec import Codec from ._protocol import ConnectWireError @@ -39,7 +39,7 @@ def __init__( self._next_message_length = None - def feed(self, data: bytes) -> Iterator[_RES]: + def feed(self, data: bytes | memoryview | bytearray) -> Iterator[_RES]: self._buffer.extend(data) return self._read_messages() @@ -93,7 +93,7 @@ def handle_end_message( return False def handle_response_complete( - self, response: httpx.Response, e: ConnectError | None = None + self, response: Response | SyncResponse, e: ConnectError | None = None ) -> None: """Handle any client finalization needed when the response is complete. This is typically used to process trailers for gRPC. diff --git a/src/connectrpc/_protocol.py b/src/connectrpc/_protocol.py index 1de614d..cdb19dd 100644 --- a/src/connectrpc/_protocol.py +++ b/src/connectrpc/_protocol.py @@ -15,7 +15,8 @@ if TYPE_CHECKING: from collections.abc import Iterable, Mapping, Sequence - import httpx + from pyqwest import FullResponse + from pyqwest import Headers as HTTPHeaders from ._codec import Codec from ._compression import Compression @@ -97,16 +98,14 @@ def from_exception(exc: Exception) -> ConnectWireError: return ConnectWireError(Code.UNKNOWN, str(exc), details=()) @staticmethod - def from_response(response: httpx.Response) -> ConnectWireError: + def from_response(response: FullResponse) -> ConnectWireError: try: data = response.json() except Exception: data = None if isinstance(data, dict): - return ConnectWireError.from_dict( - data, response.status_code, Code.UNAVAILABLE - ) - return ConnectWireError.from_http_status(response.status_code) + return ConnectWireError.from_dict(data, response.status, Code.UNAVAILABLE) + return ConnectWireError.from_http_status(response.status) @staticmethod def from_dict( @@ -243,7 +242,7 @@ def validate_stream_response( ... def handle_response_compression( - self, headers: httpx.Headers, *, stream: bool + self, headers: HTTPHeaders, *, stream: bool ) -> Compression: """Handles response compression based on the response headers.""" ... 
diff --git a/src/connectrpc/_protocol_connect.py b/src/connectrpc/_protocol_connect.py index c81de6e..6f89a40 100644 --- a/src/connectrpc/_protocol_connect.py +++ b/src/connectrpc/_protocol_connect.py @@ -25,7 +25,7 @@ if TYPE_CHECKING: from collections.abc import Iterable, Mapping - import httpx + import pyqwest from ._codec import Codec from ._compression import Compression @@ -269,7 +269,7 @@ def validate_stream_response( ) def handle_response_compression( - self, headers: httpx.Headers, *, stream: bool + self, headers: pyqwest.Headers, *, stream: bool ) -> Compression: compression_header = ( CONNECT_STREAMING_HEADER_COMPRESSION diff --git a/src/connectrpc/_protocol_grpc.py b/src/connectrpc/_protocol_grpc.py index 09a0d6b..2ae8f7f 100644 --- a/src/connectrpc/_protocol_grpc.py +++ b/src/connectrpc/_protocol_grpc.py @@ -25,7 +25,8 @@ if TYPE_CHECKING: from collections.abc import Iterable, Mapping - import httpx + from pyqwest import Headers as HTTPHeaders + from pyqwest import Response, SyncResponse from ._codec import Codec from ._compression import Compression @@ -221,7 +222,7 @@ def validate_stream_response( ) def handle_response_compression( - self, headers: httpx.Headers, *, stream: bool + self, headers: HTTPHeaders, *, stream: bool ) -> Compression: encoding = headers.get(GRPC_HEADER_COMPRESSION) if not encoding: @@ -265,15 +266,11 @@ def handle_end_message( return False def handle_response_complete( - self, response: httpx.Response, e: ConnectError | None = None + self, response: Response | SyncResponse, e: ConnectError | None = None ) -> None: - get_trailers = response.extensions.get("get_trailers") - if not get_trailers: - msg = "gRPC client support requires using an HTTPX-compatible client that supports trailers" - raise RuntimeError(msg) + trailers = response.trailers # Go ahead and feed HTTP trailers regardless of gRPC semantics. - trailers: httpx.Headers = get_trailers() - handle_response_trailers({k: trailers.get_list(k) for k in trailers}) + handle_response_trailers(trailers) # Now handle gRPC trailers. They are either the HTTP trailers if there was body present # or HTTP headers if there was no body. 
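Behavior that was previously attached with httpx `event_hooks` is expressed as a wrapping pyqwest `Transport` in the test changes later in this patch. A minimal sketch of that pattern, using only the `Transport`, `Request`, and `Response` types the tests already import — `HeaderInjector` and `x-example-header` are illustrative names, not part of the patch:

```python
from pyqwest import Client, Request, Response, Transport

class HeaderInjector(Transport):
    """Adds a header to every request before delegating to the wrapped transport."""

    def __init__(self, transport: Transport) -> None:
        self._transport = transport

    async def execute(self, request: Request) -> Response:
        # Mutate the outgoing request, then hand it to the real transport,
        # much like an httpx "request" event hook used to.
        request.headers["x-example-header"] = "1"
        return await self._transport.execute(request)
```

The wrapped transport is then passed to `Client(...)` and on to a Connect client via `http_client=`, exactly as the updated tests below do with their `ResponseRecorder` and `ModifyTimeout` transports.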
diff --git a/src/connectrpc/_response_metadata.py b/src/connectrpc/_response_metadata.py index a08e2b8..41dff59 100644 --- a/src/connectrpc/_response_metadata.py +++ b/src/connectrpc/_response_metadata.py @@ -10,20 +10,20 @@ from collections.abc import Mapping, Sequence from types import TracebackType - from httpx import Headers as HttpxHeaders + from pyqwest import Headers as HTTPHeaders _current_response = ContextVar["ResponseMetadata"]("connectrpc_current_response") -def handle_response_headers(headers: HttpxHeaders) -> None: +def handle_response_headers(headers: HTTPHeaders) -> None: response = _current_response.get(None) if not response: return response_headers: Headers = Headers() response_trailers: Headers = Headers() - for key, value in headers.multi_items(): + for key, value in headers.items(): if key.startswith("trailer-"): normalized_key = key[len("trailer-") :] obj = response_trailers @@ -37,14 +37,19 @@ def handle_response_headers(headers: HttpxHeaders) -> None: response._trailers = response_trailers # noqa: SLF001 -def handle_response_trailers(trailers: Mapping[str, Sequence[str]]) -> None: +def handle_response_trailers( + trailers: HTTPHeaders | Mapping[str, Sequence[str]], +) -> None: response = _current_response.get(None) if not response: return response_trailers = response.trailers() - for key, values in trailers.items(): - for value in values: + for key, value in trailers.items(): + if isinstance(value, str): response_trailers.add(key, value) + else: + for v in value: + response_trailers.add(key, v) if response_trailers: response._trailers = response_trailers # noqa: SLF001 diff --git a/test/test_client.py b/test/test_client.py index c2df9f5..3c4d681 100644 --- a/test/test_client.py +++ b/test/test_client.py @@ -1,7 +1,8 @@ from __future__ import annotations import pytest -from httpx import ASGITransport, AsyncClient, Client, WSGITransport +from pyqwest import Client, SyncClient +from pyqwest.testing import ASGITransport, WSGITransport from connectrpc.client import ResponseMetadata @@ -68,7 +69,7 @@ def make_hat(self, request, ctx): ) client = HaberdasherClientSync( - "http://localhost", session=Client(transport=transport) + "http://localhost", http_client=SyncClient(transport=transport) ) with ResponseMetadata() as resp: @@ -100,11 +101,11 @@ async def make_hat(self, request, ctx): return Hat() transport = ASGITransport( - HaberdasherASGIApplication(HeadersHaberdasher(headers, trailers)) # pyright:ignore[reportArgumentType] - httpx type is not complete + HaberdasherASGIApplication(HeadersHaberdasher(headers, trailers)) ) client = HaberdasherClient( - "http://localhost", session=AsyncClient(transport=transport) + "http://localhost", http_client=Client(transport=transport) ) with ResponseMetadata() as resp: diff --git a/test/test_details.py b/test/test_details.py index 83a9b67..b6d5ee2 100644 --- a/test/test_details.py +++ b/test/test_details.py @@ -4,7 +4,8 @@ import pytest from google.protobuf.struct_pb2 import Struct, Value -from httpx import ASGITransport, AsyncClient, Client, WSGITransport +from pyqwest import Client, SyncClient +from pyqwest.testing import ASGITransport, WSGITransport from connectrpc.code import Code from connectrpc.errors import ConnectError, pack_any @@ -35,7 +36,7 @@ def make_hat(self, request, ctx) -> NoReturn: app = HaberdasherWSGIApplication(DetailsHaberdasherSync()) with ( HaberdasherClientSync( - "http://localhost", session=Client(transport=WSGITransport(app=app)) + "http://localhost", http_client=SyncClient(transport=WSGITransport(app=app)) 
) as client, pytest.raises(ConnectError) as exc_info, ): @@ -65,9 +66,9 @@ async def make_hat(self, request, ctx) -> NoReturn: ) app = HaberdasherASGIApplication(DetailsHaberdasher()) - transport = ASGITransport(app) # pyright:ignore[reportArgumentType] - httpx type is not complete + transport = ASGITransport(app) async with HaberdasherClient( - "http://localhost", session=AsyncClient(transport=transport) + "http://localhost", http_client=Client(transport=transport) ) as client: with pytest.raises(ConnectError) as exc_info: await client.make_hat(request=Size(inches=10)) diff --git a/test/test_errors.py b/test/test_errors.py index 8e07207..fa3bb0c 100644 --- a/test/test_errors.py +++ b/test/test_errors.py @@ -1,22 +1,23 @@ from __future__ import annotations +import asyncio import threading -import time from http import HTTPStatus from typing import NoReturn import pytest -import uvicorn -from httpx import ( - ASGITransport, - AsyncClient, +from pyqwest import ( Client, - MockTransport, + Headers, Request, Response, - Timeout, - WSGITransport, + SyncClient, + SyncRequest, + SyncResponse, + SyncTransport, + Transport, ) +from pyqwest.testing import ASGITransport, WSGITransport from connectrpc.code import Code from connectrpc.errors import ConnectError @@ -64,16 +65,22 @@ def make_hat(self, request, ctx) -> NoReturn: app = HaberdasherWSGIApplication(haberdasher) transport = WSGITransport(app) - recorded_response: Response | None = None + recorded_response: SyncResponse | None = SyncResponse(status=100) - def record_response(response) -> None: - nonlocal recorded_response - recorded_response = response + class ResponseRecorder(SyncTransport): + def __init__(self, transport: SyncTransport) -> None: + self._transport = transport - session = Client(transport=transport, event_hooks={"response": [record_response]}) + def execute_sync(self, request: SyncRequest) -> SyncResponse: + nonlocal recorded_response + response = self._transport.execute_sync(request) + recorded_response = response + return response + + http_client = SyncClient(transport=ResponseRecorder(transport)) with ( - HaberdasherClientSync("http://localhost", session=session) as client, + HaberdasherClientSync("http://localhost", http_client=http_client) as client, pytest.raises(ConnectError) as exc_info, ): client.make_hat(request=Size(inches=10)) @@ -81,7 +88,7 @@ def record_response(response) -> None: assert exc_info.value.code == code assert exc_info.value.message == message assert recorded_response is not None - assert recorded_response.status_code == http_status + assert recorded_response.status == http_status @pytest.mark.asyncio @@ -96,72 +103,72 @@ async def make_hat(self, request, ctx) -> NoReturn: haberdasher = ErrorHaberdasher(ConnectError(code, message)) app = HaberdasherASGIApplication(haberdasher) - transport = ASGITransport(app) # pyright:ignore[reportArgumentType] - httpx type is not complete + transport = ASGITransport(app) - recorded_response: Response | None = None + recorded_response: Response | None = Response(status=100) - async def record_response(response) -> None: - nonlocal recorded_response - recorded_response = response + class ResponseRecorder(Transport): + def __init__(self, transport: Transport) -> None: + self._transport = transport - async with ( - AsyncClient( - transport=transport, event_hooks={"response": [record_response]} - ) as session, - HaberdasherClient("http://localhost", session=session) as client, - ): + async def execute(self, request: Request) -> Response: + nonlocal recorded_response + 
response = await self._transport.execute(request) + recorded_response = response + return response + + http_client = Client(transport=ResponseRecorder(transport)) + async with HaberdasherClient("http://localhost", http_client=http_client) as client: with pytest.raises(ConnectError) as exc_info: await client.make_hat(request=Size(inches=10)) assert exc_info.value.code == code assert exc_info.value.message == message assert recorded_response is not None - assert recorded_response.status_code == http_status + assert recorded_response.status == http_status _http_errors = [ - pytest.param(400, {}, Code.INTERNAL, "Bad Request", id="400"), - pytest.param(401, {}, Code.UNAUTHENTICATED, "Unauthorized", id="401"), - pytest.param(403, {}, Code.PERMISSION_DENIED, "Forbidden", id="403"), - pytest.param(404, {}, Code.UNIMPLEMENTED, "Not Found", id="404"), - pytest.param(429, {}, Code.UNAVAILABLE, "Too Many Requests", id="429"), - pytest.param(499, {}, Code.UNKNOWN, "Client Closed Request", id="499"), - pytest.param(502, {}, Code.UNAVAILABLE, "Bad Gateway", id="502"), - pytest.param(503, {}, Code.UNAVAILABLE, "Service Unavailable", id="503"), - pytest.param(504, {}, Code.UNAVAILABLE, "Gateway Timeout", id="504"), + pytest.param(400, b"", {}, Code.INTERNAL, "Bad Request", id="400"), + pytest.param(401, b"", {}, Code.UNAUTHENTICATED, "Unauthorized", id="401"), + pytest.param(403, b"", {}, Code.PERMISSION_DENIED, "Forbidden", id="403"), + pytest.param(404, b"", {}, Code.UNIMPLEMENTED, "Not Found", id="404"), + pytest.param(429, b"", {}, Code.UNAVAILABLE, "Too Many Requests", id="429"), + pytest.param(499, b"", {}, Code.UNKNOWN, "Client Closed Request", id="499"), + pytest.param(502, b"", {}, Code.UNAVAILABLE, "Bad Gateway", id="502"), + pytest.param(503, b"", {}, Code.UNAVAILABLE, "Service Unavailable", id="503"), + pytest.param(504, b"", {}, Code.UNAVAILABLE, "Gateway Timeout", id="504"), pytest.param( 400, - {"json": {"code": "invalid_argument", "message": "Bad parameter"}}, + b'{"code": "invalid_argument", "message": "Bad parameter"}', + {"content-type": "application/json"}, Code.INVALID_ARGUMENT, "Bad parameter", id="connect error", ), pytest.param( 400, - {"json": {"message": "Bad parameter"}}, + b'{"message": "Bad parameter"}', + {"content-type": "application/json"}, Code.INTERNAL, "Bad parameter", id="connect error without code", ), pytest.param( 404, - {"json": {"code": "not_found"}}, + b'{"code": "not_found"}', + {"content-type": "application/json"}, Code.NOT_FOUND, "", id="connect error without message", ), pytest.param( - 502, {"text": '"{bad_json'}, Code.UNAVAILABLE, "Bad Gateway", id="bad json" + 502, b'"{bad_json', {}, Code.UNAVAILABLE, "Bad Gateway", id="bad json" ), pytest.param( 200, - { - "text": "weird encoding", - "headers": { - "content-type": "application/proto", - "content-encoding": "weird", - }, - }, + b"weird encoding", + {"content-type": "application/proto", "content-encoding": "weird"}, Code.INTERNAL, "unknown encoding 'weird'; accepted encodings are gzip, br, zstd, identity", id="bad encoding", @@ -170,13 +177,22 @@ async def record_response(response) -> None: @pytest.mark.parametrize( - ("response_status", "response_kwargs", "code", "message"), _http_errors + ("response_status", "content", "response_headers", "code", "message"), _http_errors ) -def test_sync_http_errors(response_status, response_kwargs, code, message) -> None: - transport = MockTransport(lambda _: Response(response_status, **response_kwargs)) +def test_sync_http_errors( + response_status, content, 
response_headers, code, message +) -> None: + class MockTransport(SyncTransport): + def execute_sync(self, request: SyncRequest) -> SyncResponse: + return SyncResponse( + status=response_status, + content=content, + headers=Headers(response_headers), + ) + with ( HaberdasherClientSync( - "http://localhost", session=Client(transport=transport) + "http://localhost", http_client=SyncClient(transport=MockTransport()) ) as client, pytest.raises(ConnectError) as exc_info, ): @@ -187,14 +203,21 @@ def test_sync_http_errors(response_status, response_kwargs, code, message) -> No @pytest.mark.asyncio @pytest.mark.parametrize( - ("response_status", "response_kwargs", "code", "message"), _http_errors + ("response_status", "content", "response_headers", "code", "message"), _http_errors ) async def test_async_http_errors( - response_status, response_kwargs, code, message + response_status, content, response_headers, code, message ) -> None: - transport = MockTransport(lambda _: Response(response_status, **response_kwargs)) + class MockTransport(Transport): + async def execute(self, request: Request) -> Response: + return Response( + status=response_status, + content=content, + headers=Headers(response_headers), + ) + async with HaberdasherClient( - "http://localhost", session=AsyncClient(transport=transport) + "http://localhost", http_client=Client(transport=MockTransport()) ) as client: with pytest.raises(ConnectError) as exc_info: await client.make_hat(request=Size(inches=10)) @@ -292,12 +315,12 @@ def make_hat(self, request, ctx): app = HaberdasherWSGIApplication(ValidHaberdasherSync()) transport = WSGITransport(app) - client = Client(transport=transport) - response = client.request( + client = SyncClient(transport) + response = client.execute( method=method, url=f"http://localhost{path}", content=body, headers=headers ) - assert response.status_code == response_status + assert response.status == response_status assert response.headers == response_headers @@ -315,75 +338,46 @@ async def make_hat(self, request, ctx): haberdasher = ValidHaberdasher() app = HaberdasherASGIApplication(haberdasher) - transport = ASGITransport(app) # pyright:ignore[reportArgumentType] - httpx type is not complete + transport = ASGITransport(app) - client = AsyncClient(transport=transport) - response = await client.request( + client = Client(transport) + response = await client.execute( method=method, url=f"http://localhost{path}", content=body, headers=headers ) - assert response.status_code == response_status + assert response.status == response_status assert response.headers == response_headers -# To exercise timeouts, can't use mock transports -@pytest.fixture(scope="module") -def timeout_server(): - class SleepingHaberdasher(Haberdasher): - def make_hat(self, request, ctx) -> NoReturn: - time.sleep(10) - raise AssertionError("Should be timedout already") - - app = HaberdasherASGIApplication(SleepingHaberdasher()) - config = uvicorn.Config( - app, port=0, log_level="critical", timeout_graceful_shutdown=0 - ) - server = uvicorn.Server(config) - # Since we want to target the server from sync clients as well as async, - # it's best to use a sync fixture with the server on a background thread. 
- thread = threading.Thread(target=server.run) - thread.daemon = True - thread.start() - - for _ in range(50): - if server.started: - break - time.sleep(0.1) - assert server.started - - port = server.servers[0].sockets[0].getsockname()[1] - - yield f"http://localhost:{port}" - - server.should_exit = True - - @pytest.mark.parametrize( ("client_timeout_ms", "call_timeout_ms"), [(200, None), (None, 200)] ) -def test_sync_client_timeout( - client_timeout_ms, call_timeout_ms, timeout_server: str -) -> None: +def test_sync_client_timeout(client_timeout_ms, call_timeout_ms) -> None: recorded_timeout_header = "" - def modify_timeout_header(request: Request) -> None: - nonlocal recorded_timeout_header - recorded_timeout_header = request.headers.get("connect-timeout-ms") - # Make sure server doesn't timeout since we are verifying client timeout - request.headers["connect-timeout-ms"] = "10000" + class ModifyTimeout(SyncTransport): + def __init__(self, transport: SyncTransport) -> None: + self._transport = transport + + def execute_sync(self, request: SyncRequest) -> SyncResponse: + nonlocal recorded_timeout_header + recorded_timeout_header = request.headers.get("connect-timeout-ms") + # Make sure server doesn't timeout since we are verifying client timeout + request.headers["connect-timeout-ms"] = "10000" + return self._transport.execute_sync(request) + + timed_out = threading.Event() + + class SleepingHaberdasher(HaberdasherSync): + def make_hat(self, request, ctx) -> NoReturn: + timed_out.wait() + raise AssertionError("Timedout already") + app = HaberdasherWSGIApplication(SleepingHaberdasher()) + http_client = SyncClient(ModifyTimeout(WSGITransport(app))) with ( - Client( - timeout=Timeout( - None, - read=client_timeout_ms / 1000.0 - if client_timeout_ms is not None - else None, - ), - event_hooks={"request": [modify_timeout_header]}, - ) as session, HaberdasherClientSync( - timeout_server, timeout_ms=client_timeout_ms, session=session + "http://localhost", timeout_ms=client_timeout_ms, http_client=http_client ) as client, pytest.raises(ConnectError) as exc_info, ): @@ -392,31 +386,38 @@ def modify_timeout_header(request: Request) -> None: assert exc_info.value.code == Code.DEADLINE_EXCEEDED assert exc_info.value.message == "Request timed out" assert recorded_timeout_header == "200" + timed_out.set() @pytest.mark.asyncio @pytest.mark.parametrize( ("client_timeout_ms", "call_timeout_ms"), [(200, None), (None, 200)] ) -async def test_async_client_timeout( - client_timeout_ms, call_timeout_ms, timeout_server: str -) -> None: +async def test_async_client_timeout(client_timeout_ms, call_timeout_ms) -> None: recorded_timeout_header = "" - async def modify_timeout_header(request: Request) -> None: - nonlocal recorded_timeout_header - recorded_timeout_header = request.headers.get("connect-timeout-ms") - # Make sure server doesn't timeout since we are verifying client timeout - request.headers["connect-timeout-ms"] = "10000" - - async with ( - AsyncClient( - timeout=Timeout(None), event_hooks={"request": [modify_timeout_header]} - ) as session, - HaberdasherClient( - timeout_server, timeout_ms=client_timeout_ms, session=session - ) as client, - ): + class ModifyTimeout(Transport): + def __init__(self, transport: Transport) -> None: + self._transport = transport + + async def execute(self, request: Request) -> Response: + nonlocal recorded_timeout_header + recorded_timeout_header = request.headers.get("connect-timeout-ms") + # Make sure server doesn't timeout since we are verifying client timeout + 
request.headers["connect-timeout-ms"] = "10000" + return await self._transport.execute(request) + + class SleepingHaberdasher(Haberdasher): + async def make_hat(self, request, ctx) -> NoReturn: + await asyncio.sleep(10) + raise AssertionError("Should be timedout already") + + app = HaberdasherASGIApplication(SleepingHaberdasher()) + http_client = Client(ModifyTimeout(ASGITransport(app))) + + async with HaberdasherClient( + "http://localhost", timeout_ms=client_timeout_ms, http_client=http_client + ) as client: with pytest.raises(ConnectError) as exc_info: await client.make_hat(request=Size(inches=10), timeout_ms=call_timeout_ms) diff --git a/test/test_example.py b/test/test_example.py index c3a4193..87378be 100644 --- a/test/test_example.py +++ b/test/test_example.py @@ -27,7 +27,6 @@ def test_sync(sync_server: WSGIServer) -> None: ) as client: response = client.say(SayRequest(sentence="Hello")) assert len(response.sentence) > 0 - assert client._session.is_closed @pytest.mark.asyncio @@ -37,4 +36,3 @@ async def test_async_client_basic(sync_server: WSGIServer) -> None: ) as client: response = await client.say(SayRequest(sentence="Hello")) assert len(response.sentence) > 0 - assert client._session.is_closed diff --git a/test/test_interceptor.py b/test/test_interceptor.py index dfd1a11..e08633f 100644 --- a/test/test_interceptor.py +++ b/test/test_interceptor.py @@ -5,7 +5,8 @@ import pytest import pytest_asyncio -from httpx import ASGITransport, AsyncClient, Client, WSGITransport +from pyqwest import Client, SyncClient +from pyqwest.testing import ASGITransport, WSGITransport from .haberdasher_connect import ( Haberdasher, @@ -65,11 +66,11 @@ async def make_various_hats(self, request, ctx): yield Hat(size=s.inches, color=next(colors)) app = HaberdasherASGIApplication(SimpleHaberdasher(), interceptors=(interceptor,)) - transport = ASGITransport(app) # pyright:ignore[reportArgumentType] - httpx type is not complete + transport = ASGITransport(app) async with HaberdasherClient( "http://localhost", interceptors=(interceptor,), - session=AsyncClient(transport=transport), + http_client=Client(transport=transport), ) as client: yield client @@ -150,11 +151,11 @@ def make_various_hats(self, request, ctx): app = HaberdasherWSGIApplication( SimpleHaberdasherSync(), interceptors=(interceptor,) ) - transport = WSGITransport(app) # pyright:ignore[reportArgumentType] - httpx type is not complete + transport = WSGITransport(app) with HaberdasherClientSync( "http://localhost", interceptors=(interceptor,), - session=Client(transport=transport), + http_client=SyncClient(transport), ) as client: yield client diff --git a/test/test_lifespan.py b/test/test_lifespan.py index 90ba14b..dbe23f1 100644 --- a/test/test_lifespan.py +++ b/test/test_lifespan.py @@ -1,13 +1,10 @@ from __future__ import annotations -import asyncio from collections import Counter -from io import StringIO import pytest -import uvicorn -from httpx import ASGITransport, AsyncClient -from uvicorn.config import LOGGING_CONFIG +from pyqwest import Client +from pyqwest.testing import ASGITransport from connectrpc.errors import ConnectError @@ -42,31 +39,15 @@ async def counting_haberdasher(): final_count = counter["requests"] app = HaberdasherASGIApplication(counting_haberdasher()) - # Use uvicorn since it supports lifespan - config = uvicorn.Config( - app, port=0, log_level="critical", timeout_graceful_shutdown=0 - ) - server = uvicorn.Server(config) - uvicorn_task = asyncio.create_task(server.serve()) - - for _ in range(50): - if server.started: 
- break - await asyncio.sleep(0.1) - else: - msg = "Server did not start" - raise RuntimeError(msg) - - port = server.servers[0].sockets[0].getsockname()[1] - - async with HaberdasherClient(f"http://localhost:{port}") as client: + async with ( + ASGITransport(app) as transport, + HaberdasherClient("http://localhost", http_client=Client(transport)) as client, + ): for _ in range(5): hat = await client.make_hat(Size(inches=10)) assert hat.size == 10 assert hat.color == "blue" - server.should_exit = True - await uvicorn_task assert final_count == 5 @@ -88,21 +69,13 @@ async def counting_haberdasher(): final_count = counter["requests"] app = HaberdasherASGIApplication(counting_haberdasher()) - # Use uvicorn since it supports lifespan - logs = StringIO() - log_config = LOGGING_CONFIG.copy() - log_config["handlers"]["default"]["stream"] = logs - config = uvicorn.Config( - app, - port=0, - log_level="error", - timeout_graceful_shutdown=0, - log_config=log_config, - ) - server = uvicorn.Server(config) - await server.serve() + with pytest.raises( + RuntimeError, match="ASGI application failed to start up" + ) as exc_info: + async with ASGITransport(app): + pass - assert "Haberdasher failed to start" in logs.getvalue() + assert "Haberdasher failed to start" in str(exc_info.value) @pytest.mark.asyncio @@ -117,39 +90,12 @@ async def counting_haberdasher(): raise RuntimeError(msg) app = HaberdasherASGIApplication(counting_haberdasher()) - # Use uvicorn since it supports lifespan - logs = StringIO() - log_config = LOGGING_CONFIG.copy() - log_config["handlers"]["default"]["stream"] = logs - config = uvicorn.Config( - app, - port=0, - log_level="error", - timeout_graceful_shutdown=0, - log_config=log_config, - ) - server = uvicorn.Server(config) - uvicorn_task = asyncio.create_task(server.serve()) - - for _ in range(50): - if server.started: - break - await asyncio.sleep(0.1) - else: - msg = "Server did not start" - raise RuntimeError(msg) - - port = server.servers[0].sockets[0].getsockname()[1] - - async with HaberdasherClient(f"http://localhost:{port}") as client: - for _ in range(5): - hat = await client.make_hat(Size(inches=10)) - assert hat.size == 10 - assert hat.color == "blue" - - server.should_exit = True - await uvicorn_task - assert "Haberdasher failed to shut down" in logs.getvalue() + with pytest.raises( + RuntimeError, match="ASGI application failed to shut down" + ) as exc_info: + async with ASGITransport(app): + pass + assert "Haberdasher failed to shut down" in str(exc_info.value) @pytest.mark.asyncio @@ -166,13 +112,13 @@ async def counting_haberdasher(): final_count = counter["requests"] app = HaberdasherASGIApplication(counting_haberdasher()) - transport = ASGITransport(app) # pyright:ignore[reportArgumentType] - httpx type is not complete + transport = ASGITransport(app) async with HaberdasherClient( - "http://localhost", session=AsyncClient(transport=transport) + "http://localhost", http_client=Client(transport) ) as client: - with pytest.raises(ConnectError) as e: + with pytest.raises(ConnectError): await client.make_hat(Size(inches=10)) assert ( "ASGI server does not support lifespan but async generator passed for service." 
- in str(e.value) + in str(transport.app_exception) ) diff --git a/test/test_roundtrip.py b/test/test_roundtrip.py index 67f4501..f3a6303 100644 --- a/test/test_roundtrip.py +++ b/test/test_roundtrip.py @@ -3,7 +3,8 @@ from typing import TYPE_CHECKING import pytest -from httpx import ASGITransport, AsyncClient, Client, WSGITransport +from pyqwest import Client, SyncClient +from pyqwest.testing import ASGITransport, WSGITransport from connectrpc.code import Code from connectrpc.errors import ConnectError @@ -32,7 +33,7 @@ def make_hat(self, request, ctx): app = HaberdasherWSGIApplication(RoundtripHaberdasherSync()) with HaberdasherClientSync( "http://localhost", - session=Client(transport=WSGITransport(app=app)), + http_client=SyncClient(WSGITransport(app=app)), proto_json=proto_json, send_compression=compression, accept_compression=[compression] if compression else None, @@ -51,10 +52,10 @@ async def make_hat(self, request, ctx): return Hat(size=request.inches, color="green") app = HaberdasherASGIApplication(DetailsHaberdasher()) - transport = ASGITransport(app) # pyright:ignore[reportArgumentType] - httpx type is not complete + transport = ASGITransport(app) async with HaberdasherClient( "http://localhost", - session=AsyncClient(transport=transport), + http_client=Client(transport), proto_json=proto_json, send_compression=compression, accept_compression=[compression] if compression else None, @@ -78,12 +79,12 @@ async def make_similar_hats(self, request, ctx): raise ConnectError(Code.RESOURCE_EXHAUSTED, "No more hats available") app = HaberdasherASGIApplication(StreamingHaberdasher()) - transport = ASGITransport(app) # pyright:ignore[reportArgumentType] - httpx type is not complete + transport = ASGITransport(app) hats: list[Hat] = [] async with HaberdasherClient( "http://localhost", - session=AsyncClient(transport=transport), + http_client=Client(transport=transport), proto_json=proto_json, send_compression=compression, accept_compression=[compression] if compression else None, @@ -125,10 +126,10 @@ def make_various_hats(self, request: Iterator[Size], ctx) -> Iterator[Hat]: yield good_hat if client_bad else bad_hat app = HaberdasherWSGIApplication(LargeHaberdasher(), read_max_bytes=100) - transport = WSGITransport(app) # pyright:ignore[reportArgumentType] - httpx type is not complete + transport = WSGITransport(app) with HaberdasherClientSync( "http://localhost", - session=Client(transport=transport), + http_client=SyncClient(transport), send_compression=compression, accept_compression=[compression] if compression else None, read_max_bytes=100, @@ -190,10 +191,10 @@ async def make_various_hats( yield good_hat if client_bad else bad_hat app = HaberdasherASGIApplication(LargeHaberdasher(), read_max_bytes=100) - transport = ASGITransport(app) # pyright:ignore[reportArgumentType] - httpx type is not complete + transport = ASGITransport(app) async with HaberdasherClient( "http://localhost", - session=AsyncClient(transport=transport), + http_client=Client(transport=transport), send_compression=compression, accept_compression=[compression] if compression else None, read_max_bytes=100, diff --git a/uv.lock b/uv.lock index 2e97a50..2010db5 100644 --- a/uv.lock +++ b/uv.lock @@ -360,8 +360,8 @@ name = "connect-python" version = "0.7.1" source = { editable = "." 
} dependencies = [ - { name = "httpx" }, { name = "protobuf" }, + { name = "pyqwest" }, ] [package.dev-dependencies] @@ -374,10 +374,8 @@ dev = [ { name = "granian" }, { name = "grpcio-tools" }, { name = "gunicorn", extra = ["gevent"] }, - { name = "httpx", extra = ["http2"] }, { name = "hypercorn" }, { name = "just-bin", marker = "sys_platform != 'win32'" }, - { name = "pyqwest" }, { name = "pyright", extra = ["nodejs"] }, { name = "pytest" }, { name = "pytest-asyncio" }, @@ -393,8 +391,8 @@ dev = [ [package.metadata] requires-dist = [ - { name = "httpx", specifier = ">=0.28.1" }, { name = "protobuf", specifier = ">=5.28" }, + { name = "pyqwest", specifier = ">=0.3.2" }, ] [package.metadata.requires-dev] @@ -407,10 +405,8 @@ dev = [ { name = "granian", specifier = "==2.5.7" }, { name = "grpcio-tools", specifier = "==1.76.0" }, { name = "gunicorn", extras = ["gevent"], specifier = "==23.0.0" }, - { name = "httpx", extras = ["http2"], specifier = "==0.28.1" }, { name = "hypercorn", specifier = "==0.17.3" }, { name = "just-bin", marker = "sys_platform != 'win32'", specifier = "==1.42.4" }, - { name = "pyqwest", specifier = "==0.1.0" }, { name = "pyright", extras = ["nodejs"], specifier = "==1.1.405" }, { name = "pytest" }, { name = "pytest-asyncio" }, @@ -1100,11 +1096,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, ] -[package.optional-dependencies] -http2 = [ - { name = "h2" }, -] - [[package]] name = "hypercorn" version = "0.17.3" @@ -1566,52 +1557,52 @@ wheels = [ [[package]] name = "pyqwest" -version = "0.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/99/af/0a40903f7774f6426d162e706eb827927cd93fb426db25185f5420723bd2/pyqwest-0.1.0.tar.gz", hash = "sha256:3d46d41c490dd427578852284a8a0a288c9c2fe100fb2d7a995b24fbe5bdcec3", size = 388361, upload-time = "2026-01-11T12:48:59.424Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b8/34/3f9a3ecc7d07fe5235c8ce66dd508725b5e07ac478d3875d5ed1ab08d84d/pyqwest-0.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fde67a58bf2818334ec28078f0ad136bf949853781fba853ff1f2ea14383c7cf", size = 4643938, upload-time = "2026-01-11T12:47:52.006Z" }, - { url = "https://files.pythonhosted.org/packages/ea/25/419831910dd2482d72447e33caa51de72c27eeb387aba856c45cb974b0d6/pyqwest-0.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:381c9791367c0fa0412d30e36d565ee5c852f17f8a0039fc105b0948872f2d20", size = 4946931, upload-time = "2026-01-11T12:47:53.915Z" }, - { url = "https://files.pythonhosted.org/packages/ae/46/85f1f8d7ece18bb353bc747f74bbd35e83a8cc81ca89fef6f5299968c9e3/pyqwest-0.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:595feda76a57b4e66e40aba1ff8311240279007557772c8c1ba1e4619aea91a7", size = 4964379, upload-time = "2026-01-11T12:47:55.372Z" }, - { url = "https://files.pythonhosted.org/packages/2a/43/7bd95cba1a3aa3e4bff030ae4702ce9f77d79f3b5a96d904d58f7f7ac8d9/pyqwest-0.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:771fde2a689dd91d86e594e260157894e9c20bb7ed361d24df52bebc0518de7d", size = 5134230, upload-time = "2026-01-11T12:47:57.161Z" }, - { url = 
"https://files.pythonhosted.org/packages/ca/2c/353fc236322dad943e90605d261f8b80e2713417b12555d5c844cb87716e/pyqwest-0.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2edc31a3fac5793e0fe4bf9f985667d3e449c22a919c1a099e7b2cce4bde36fb", size = 5272762, upload-time = "2026-01-11T12:47:58.775Z" }, - { url = "https://files.pythonhosted.org/packages/c1/6d/04e11fa0758260497c9c8b41fa88f9573323bd6872884bae083bdd644e27/pyqwest-0.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:03a90a60081af25eabd381e8ef738e76f2bfe4ca0d1fb97b33f27784c900270d", size = 4135779, upload-time = "2026-01-11T12:48:00.295Z" }, - { url = "https://files.pythonhosted.org/packages/19/d2/bf9ca7f42571b9803ccf9efe6f859bb5855ba0bd139b24e3fd14163517d2/pyqwest-0.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:440ace438c318022581d6f4dbb4db126c27eac3e833be81492019e6b7c756ffd", size = 4642506, upload-time = "2026-01-11T12:48:01.905Z" }, - { url = "https://files.pythonhosted.org/packages/00/c7/a740b87d628a063d80a81ad98bdf4ce53d364cb52a2e1fa7246baa99b914/pyqwest-0.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62e52b49399cfae41a1691bf2908c21cf33d0661a2c398ff11262650508825a7", size = 4949646, upload-time = "2026-01-11T12:48:03.258Z" }, - { url = "https://files.pythonhosted.org/packages/e8/67/5f87096e56ec2d6deca1bd3b14238229fe98c5e9c61acf1c23ea535d0e86/pyqwest-0.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:752512dff7e815ad24fc8cbc967da9c9ca4dbea95f90994655bf42fd68b3a97d", size = 4967856, upload-time = "2026-01-11T12:48:04.747Z" }, - { url = "https://files.pythonhosted.org/packages/ba/2e/2e6e1740a430adab5dd8d89aa2416449dad561a4943b4c08e48a65b70db3/pyqwest-0.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b04c3bbb7257c294ad17e99a16910e5d7fed8fe584e214b37de04e74553d841c", size = 5137749, upload-time = "2026-01-11T12:48:06.62Z" }, - { url = "https://files.pythonhosted.org/packages/e2/3d/89131413f7463407fec88045bcaf02f9b0b4cb2e3283b72d6fc6c3d4335c/pyqwest-0.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:32f40b0abbacc3f55d07b65d40f7040ddb02216b6f97519a2c1665c9699fe6fd", size = 5276383, upload-time = "2026-01-11T12:48:08.082Z" }, - { url = "https://files.pythonhosted.org/packages/e0/57/972b4b56a465dc66a735da0c781c65f1b4d65a8834bdf260f5e60e38b17c/pyqwest-0.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:b7bc7a2c0214a8b1371ed2e007fe5a2218be5d2eaf8406ed3c8240ee76844730", size = 4142489, upload-time = "2026-01-11T12:48:09.959Z" }, - { url = "https://files.pythonhosted.org/packages/58/20/429ae13e269a6042ce9ef1b1f459edd470c667395f849ee4c96e4dcfd3de/pyqwest-0.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c0b9623e7b9531e57d564de94a512db106f612881923bf073f6cb9726e1a1fa2", size = 4640061, upload-time = "2026-01-11T12:48:11.557Z" }, - { url = "https://files.pythonhosted.org/packages/cd/8e/e9d7fbbea7de86bb77eccaf8ca25e38738847d64781aa2440cf75044c351/pyqwest-0.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29df3d76dd21dea00013495812f789ab5d34b380897e1c32f48a90fef7977a0b", size = 4954669, upload-time = "2026-01-11T12:48:13.092Z" }, - { url = "https://files.pythonhosted.org/packages/f2/36/7cd96800470cf9d3de1418c6268f9cd7d8ccffceb74c7f1b5701e3249cfd/pyqwest-0.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:085c578c62f94b490e3544ba5b81da40a4b6428cc2be05d5f5c9b8fc1ed2bf28", size = 4971272, upload-time = "2026-01-11T12:48:14.867Z" }, - { url = 
"https://files.pythonhosted.org/packages/fb/f6/63c87973f4b29fd343313562190de41299e3a92e06cf47dc240730308c06/pyqwest-0.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:deaf53bf89e0add516e92fcb54403125708ba987ad63647154ee77fed2b9b491", size = 5140426, upload-time = "2026-01-11T12:48:16.692Z" }, - { url = "https://files.pythonhosted.org/packages/c4/18/ca072fa26af6af7adcf8ed9aeb8b4c8dae33da40c8b71b3b29fc183d8169/pyqwest-0.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2d68989d760b61c19480125cc88c4a6eb389cd6655dcdfbd09431db256a865b1", size = 5282165, upload-time = "2026-01-11T12:48:18.286Z" }, - { url = "https://files.pythonhosted.org/packages/2e/7b/b59b9230a3b909ecccf55b93baf1da202058eba8cdc2765e0247ab75eb91/pyqwest-0.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:8c99af411b43fb5075d5529842c5ca5ee980cd5b665cbc67878c28ae7d174e75", size = 4144298, upload-time = "2026-01-11T12:48:19.726Z" }, - { url = "https://files.pythonhosted.org/packages/50/dc/d3e4ee3ae335b8de8b5d487b29a7f4d285ba42bbd0220ec43c28c6515cc8/pyqwest-0.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:18a8da35b0f36c0f9b3a6d843d79e27c02e975c4e3b4709294e28d8adf89697d", size = 4639203, upload-time = "2026-01-11T12:48:21.181Z" }, - { url = "https://files.pythonhosted.org/packages/d3/6a/afa3348738fac1a7eb0655dfe2a184468dea9e7fcbf6893dea7628c03157/pyqwest-0.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f544df941383ef9a8610675526468a02ce6806e1d581f346a3b0fd4346ac26db", size = 4954570, upload-time = "2026-01-11T12:48:22.792Z" }, - { url = "https://files.pythonhosted.org/packages/9d/65/81dc839a591c6fedbce4049ef6016ca3b06e842fffac57fcf826a26f53bd/pyqwest-0.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c8bb1f89c929c148e9c8b1052aa35773c5e80aeea3f0982ae4e579b41dc3627", size = 4971174, upload-time = "2026-01-11T12:48:24.495Z" }, - { url = "https://files.pythonhosted.org/packages/87/56/f4a0ed74017f9b21f10847661311376698de309f3586c5a147f248de7fbb/pyqwest-0.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:22a293cd6e4bd35cc023fd8de2b0d00a7ef513040fd0377d2ee274a7fe4389ed", size = 5139044, upload-time = "2026-01-11T12:48:25.992Z" }, - { url = "https://files.pythonhosted.org/packages/37/ab/f8e320e2a165e99c9177a6aae97ca9f5a539b93f6d6f7602c7ae3687fcdf/pyqwest-0.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9593ba2c8db6a3fe353a6731b884e76a8212ecfa86af4a4380a4945fafc71c63", size = 5282032, upload-time = "2026-01-11T12:48:27.541Z" }, - { url = "https://files.pythonhosted.org/packages/d3/0e/b157099a9da34fd9c9df80a38e593a86786c876985c7ef2268f5c9b0ee8b/pyqwest-0.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:82f0112bd2aadfc8b26cc8bcaf8805bc7cea47e8779cec2bfe99bcac80990e26", size = 4143984, upload-time = "2026-01-11T12:48:29.329Z" }, - { url = "https://files.pythonhosted.org/packages/33/26/bba3f380655f17e29536706556492d88618f11eef51967ad20b724380585/pyqwest-0.1.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a5cc9dff673c3f08550a163130e426a0733edd2b686ee44ee1b67e2a629ae9f3", size = 4630320, upload-time = "2026-01-11T12:48:30.829Z" }, - { url = "https://files.pythonhosted.org/packages/22/bf/4288e904d5eb4acb7429548d82001fa9869050b219efc01f409c6d0706f8/pyqwest-0.1.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7b41771d2ae2b8a1761ef3eff15326338e239149f161d5fe75a9085db239a22", size = 4948466, upload-time = "2026-01-11T12:48:32.295Z" }, - { url = 
"https://files.pythonhosted.org/packages/0e/f6/2299a8bfb991f96e8f8a67645cafff909b1b41417d2713dc49a443e4475c/pyqwest-0.1.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8b2122c2504351e8a697a3f09da6b17ea4c1260f0db47346461da06f5ca7973", size = 4964217, upload-time = "2026-01-11T12:48:34.058Z" }, - { url = "https://files.pythonhosted.org/packages/f2/b2/80d74a8bea12fe8f783a99c282ee1e0e47877d4708b37cb55f4a2f6a3650/pyqwest-0.1.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fd750245298d792374ad1429d2c8b0f2ad9b20a71c9e47dc03aa85d7589f48eb", size = 5133456, upload-time = "2026-01-11T12:48:35.5Z" }, - { url = "https://files.pythonhosted.org/packages/0b/aa/7a88e915b81194f18cdc378df7fa5fb7e9ab7a85b11fb35e49c0724a1078/pyqwest-0.1.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3edc41632c084edfc9f9d7c93a84244a0cc09e8976890deebcf69fd6a0d9c291", size = 5275121, upload-time = "2026-01-11T12:48:37.124Z" }, - { url = "https://files.pythonhosted.org/packages/c1/79/acc6cb2806b4d5f3b97546fc2f81d215551bc0a361de1dcf239cc2786e02/pyqwest-0.1.0-cp314-cp314-win_amd64.whl", hash = "sha256:6a3804cca0f034912af586b7b95f7feaa37ea7ee524e15a62dc0eea12386d523", size = 4136557, upload-time = "2026-01-11T12:48:38.704Z" }, - { url = "https://files.pythonhosted.org/packages/93/35/23bd0d80f963eab113691546ef367db6e3e9360ee9db2379f5e3e73a42af/pyqwest-0.1.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2a67e1da99985f782deda17b3221badb9ad5a424016ec56f01d97f04b52fe530", size = 4634702, upload-time = "2026-01-11T12:48:40.135Z" }, - { url = "https://files.pythonhosted.org/packages/0c/ab/b9468cc4f3438f6d1eda39a83c179432f34ac416c48c214d60eb7e776753/pyqwest-0.1.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:299579fce767a7e5d8bf70cdf6ae8cab4c70a20b3e1d6d3ee852d8d9b519160c", size = 4942523, upload-time = "2026-01-11T12:48:41.821Z" }, - { url = "https://files.pythonhosted.org/packages/31/03/4ac7305619bfeb5157d33d3659529c7f01d31d770451e6010b7f87df8276/pyqwest-0.1.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28bab85f7829e179bb494f44f151246e9232dadd549d567f8b2bc9cf8f6af95b", size = 4962586, upload-time = "2026-01-11T12:48:43.369Z" }, - { url = "https://files.pythonhosted.org/packages/d6/9b/36aee889a09f7573e2b94af96c4c33eda75b23fab0685cc40463c88a0dbf/pyqwest-0.1.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1cb50d27252e9eeb309d1f11a321d65cc7d8bc151ff3357d2045dca2a38d1876", size = 5128760, upload-time = "2026-01-11T12:48:44.995Z" }, - { url = "https://files.pythonhosted.org/packages/44/12/4dd546bfdfd98cb1afff12c08fc2a750703a1dd47454e04d924129ae26bb/pyqwest-0.1.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:386f48f76e7499703b7edb4d26112b92943adc7fa3c09186cb67bba15bf7fa3b", size = 5272754, upload-time = "2026-01-11T12:48:46.528Z" }, - { url = "https://files.pythonhosted.org/packages/10/0b/8417fbf765ecb892378aaa4592c4c2f8e2c65e2c8aa95280c7588424396f/pyqwest-0.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:a6cf36d60a8626d1907c71494334eb3ee837c792d3cff2d243257217e5ce2720", size = 4131758, upload-time = "2026-01-11T12:48:48.068Z" }, - { url = "https://files.pythonhosted.org/packages/40/20/f148f999a9b139b9d2ae02f93328bf0060f1642395811b0eb13c70c2df73/pyqwest-0.1.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:b59e0435680c8192e9d54fb578f94cad98c744594343e6b5d73ec4be3cb4f2f7", size = 4641938, upload-time = "2026-01-11T12:48:49.511Z" }, - { url = 
"https://files.pythonhosted.org/packages/97/55/935e525c3a5f74dbe704e1a232fa6804d5a861d5f72333e2ec1d4a9cf70a/pyqwest-0.1.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8bd2102ce661d7bc06b4e3c3653ee9ed4e45920625a3e2f66a661a547f264012", size = 4952562, upload-time = "2026-01-11T12:48:50.963Z" }, - { url = "https://files.pythonhosted.org/packages/76/cc/8be13bf27d0736b95f0b33cc1b5a81d86eae20a49d3eb0f5a40fd74e5168/pyqwest-0.1.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd4307c832572098e520d1d2370aa55eb16e0a03197c5803e7683e875415999e", size = 4968141, upload-time = "2026-01-11T12:48:52.428Z" }, - { url = "https://files.pythonhosted.org/packages/4c/33/4c3c787ab22f2fd2c194641932799361b0f94306c593a1deecb79e89cbd1/pyqwest-0.1.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:b88e4a13c5e49499687a1e5e19ce1c5c3a8160b4558d10db23099b72c11ad47b", size = 5139146, upload-time = "2026-01-11T12:48:54.023Z" }, - { url = "https://files.pythonhosted.org/packages/e0/0b/a501ffb7796fbe1bb1e0230cecfb6d2a70327e0fbaf9e464d555205e7554/pyqwest-0.1.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:acce5ee2eb1e8b875b5fd3037f290152565884c999c969ef1403f0ccf6ba85fe", size = 5277425, upload-time = "2026-01-11T12:48:56.274Z" }, - { url = "https://files.pythonhosted.org/packages/3b/2a/d44c6ea9fbebb160ba395cfabfd627b7362f56381286e4383b669e9b9902/pyqwest-0.1.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:494720404202173bb7a12df05fbbf3a63d7a7843a9bd6895cf62bcd82680a0d6", size = 4139236, upload-time = "2026-01-11T12:48:58.131Z" }, +version = "0.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/b3/85fe09ef163f988a958b809d9d2f867481b6be59ce79a37f717acb9ba614/pyqwest-0.3.2.tar.gz", hash = "sha256:161495d52a3ba03f4209fd16520afdf0972bf7ddc5fc58dfc97bf13d8aa3d805", size = 422952, upload-time = "2026-01-20T14:04:29.132Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/99/af/2a6d3f52d39ef627bfc1968c1858b5464b55a6fcac93358861c14e286277/pyqwest-0.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9d5eab33e4411eb5c602ded35a4ae86f2ca80de328806e7c4415bfba3fc45f25", size = 4912496, upload-time = "2026-01-20T14:03:07.205Z" }, + { url = "https://files.pythonhosted.org/packages/d3/46/87102812ee84c09c165d4cfea57a20729e19cd717d34b82f58ef69d06392/pyqwest-0.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26ad3dca1fee4dd236896e5067bd8774a32e468aaa09c17d28efbd40cd7a36de", size = 5277378, upload-time = "2026-01-20T14:03:09.389Z" }, + { url = "https://files.pythonhosted.org/packages/bb/62/0511ce8cfaec080a29271c3fa68afdf4ad4e6435cd82e79608e994a5c26b/pyqwest-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb41051caf2beb7cb2e01bc66a59292e54936e889b6ee826ba7c75b244307218", size = 5310976, upload-time = "2026-01-20T14:03:11.262Z" }, + { url = "https://files.pythonhosted.org/packages/a9/dd/75a8332e7110f71a9686d99707ab6c35d055dca1637bfe5504c205431a98/pyqwest-0.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f3e90574ceefb0c30ec36bd45ea1b9550e2991142fee9c97440347a6bb0b78ec", size = 5467789, upload-time = "2026-01-20T14:03:13.526Z" }, + { url = "https://files.pythonhosted.org/packages/75/a0/631267ea81a5429184c46a2345af651fce97a31c4c2376b515c8d4f51001/pyqwest-0.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ef3a29f1eae3f456de91806f460150e3e0b381c5e87c20354512274a485c4bf3", size = 
5616173, upload-time = "2026-01-20T14:03:15.193Z" }, + { url = "https://files.pythonhosted.org/packages/06/f3/9642061e2d2638bbf47e5106fdbd378f698026b7dd2384fd7f41440c141c/pyqwest-0.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:2b07b3924491ae947edc4631dbfed368b7cd562d4849c4effb3ec7badacde4ec", size = 4523668, upload-time = "2026-01-20T14:03:16.806Z" }, + { url = "https://files.pythonhosted.org/packages/ee/27/3cf90c4ea3ee3ffb5efad693e353c8f7b87a8a509882561ad86d2924eb17/pyqwest-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a1fbacc976aae25f4b357f720bda90784d331c4e23439f47f5952c4f8ebf0e8", size = 4916559, upload-time = "2026-01-20T14:03:18.867Z" }, + { url = "https://files.pythonhosted.org/packages/ae/20/70869a3a88aae3b4fa4ae047ec9cfa769afcb550fd5b7157f9e58397ae92/pyqwest-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a120f3564530aaa994f41adca9dfdb00ae1f059c43cc94914b4ea7a84d59409f", size = 5274736, upload-time = "2026-01-20T14:03:20.597Z" }, + { url = "https://files.pythonhosted.org/packages/f1/f7/c4b217531d1abb69a9b0eac0dc71548fac151de39abebd38b0ba0c5cfc99/pyqwest-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0882dbad025f8b0a137d19dc77c2ae773b93d20cca8fb09e8f96d7d6030df027", size = 5307837, upload-time = "2026-01-20T14:03:22.536Z" }, + { url = "https://files.pythonhosted.org/packages/b4/af/8db4ada268a5fe327861e33b550f2db0cf7a819e8775e47c193c66ff6542/pyqwest-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a923d1e944dcff372fc107685835402bc14cb79c60ff1fa3529b874f696652d", size = 5466229, upload-time = "2026-01-20T14:03:24.548Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c0/66e3c5a39898200ad34704eb5158e86693c1cb488514f623feeb136dcba8/pyqwest-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fef4186fe2496774f4ccaf1b02c72d97f77a3cafeaf029cceba19a92108b7978", size = 5614493, upload-time = "2026-01-20T14:03:26.258Z" }, + { url = "https://files.pythonhosted.org/packages/48/15/9fb1724d44ba33b0f03b13300c41d4b6eb1a4f81055f7f13e8417fbfea83/pyqwest-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:74002da65737bf0d2d854530d66241d5419c0554b6f5d2d03a46c6d952e67a31", size = 4520791, upload-time = "2026-01-20T14:03:28.018Z" }, + { url = "https://files.pythonhosted.org/packages/b3/fd/c99f20b5f09a71f9ba58361e29f856eedc94ce5ee982cb8da0b61a46e9c9/pyqwest-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9845b6b735cff9ee3113f007b45c07c4ee672313c4debff608e9debc59066631", size = 4908963, upload-time = "2026-01-20T14:03:29.784Z" }, + { url = "https://files.pythonhosted.org/packages/c4/e5/fd8dd570145688696c9340f6a7450761c4942a262261f850d22fa51df1af/pyqwest-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e56ae29ca1c484543cd37de28be3d4cce24bab71da822dd986f93d669b000f0", size = 5269817, upload-time = "2026-01-20T14:03:31.794Z" }, + { url = "https://files.pythonhosted.org/packages/14/07/8a1810bd9b7a98805f980825ddf274a5cc9892b08fc2fc404cb6bbf64dc6/pyqwest-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:899a55afe6b0ff0e39c7af84c4a21413a9395e13341f7f5cfabc7725b3269566", size = 5310724, upload-time = "2026-01-20T14:03:33.916Z" }, + { url = "https://files.pythonhosted.org/packages/9a/bc/8d54d4e5f15408952e7ba433bcbddd4d48bc2acc5777e602fc972f936a60/pyqwest-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a293ff0c33c09c6080f8b8a50146229c4d7739b4f207b98d9dbf774c8417e919", size = 5458812, upload-time = 
"2026-01-20T14:03:35.578Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6c/cedd22d22b78212f9c417be4e42392da958140166de3dce0684251c9d478/pyqwest-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:89b6800a5ca7f31962777fd8cccdafff25022944eee2b7b0a0eeb741b42fe87b", size = 5616260, upload-time = "2026-01-20T14:03:37.358Z" }, + { url = "https://files.pythonhosted.org/packages/77/0d/cee17151403bd64e4dbd5094f312270d93c57519d7d75e0c8b52520b29b8/pyqwest-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:58d479901118917d63190f24a0878e325c1b66fd1c9b63ee2024e59a96172be2", size = 4533230, upload-time = "2026-01-20T14:03:40.859Z" }, + { url = "https://files.pythonhosted.org/packages/45/95/a50fe2deb2a8ee6ea2e417ee643db0fbecd91ea78aee608cce232308928b/pyqwest-0.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0e4ece8ea9a6fd267cd344d343a74499c5cf5a8be7ed493250c7784a5c28674f", size = 4908349, upload-time = "2026-01-20T14:03:42.506Z" }, + { url = "https://files.pythonhosted.org/packages/e3/8c/0a4defcf620c3f0ab0a79e4cb9b52613bf31f3854affde78a1ace1c19a17/pyqwest-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccc141eb5b5f7141fd89b596be473e4e4c80bedd626e37a1145d166e7f2b7f7d", size = 5268297, upload-time = "2026-01-20T14:03:44.504Z" }, + { url = "https://files.pythonhosted.org/packages/f3/11/b6ea124cf479dc400352f35694f10993a9002416bb0e4ec9c3e89c023e4a/pyqwest-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9384d99d5008386b92b663323690d797f0f0a93022306c1ae8e945fd80357068", size = 5310437, upload-time = "2026-01-20T14:03:46.526Z" }, + { url = "https://files.pythonhosted.org/packages/c3/67/7b1ffc98a10da37ac1bbc266d1fa8b54a17e886490fbc5f562d6110a6233/pyqwest-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52a32f4a7ca21f2bcc1e1844d3b28bf49f904b27f5768b28f8048fdfebe1c739", size = 5458247, upload-time = "2026-01-20T14:03:48.672Z" }, + { url = "https://files.pythonhosted.org/packages/cd/7b/6af6a4436ab1c2dc569e8228909194ca8d28b20383659897d9aac214fdad/pyqwest-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:82f2571494583434a01959fb55968046812b86b89090d836a846a89720408d66", size = 5615352, upload-time = "2026-01-20T14:03:50.759Z" }, + { url = "https://files.pythonhosted.org/packages/43/f2/3a36ef576d4cd8347fccd75fea14012dd4b7482baf14a0b0b9b578636082/pyqwest-0.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:a579eba5299dbfd0deedeee7c4eebb5656a274311f82007a3e319f8423ea8b35", size = 4533331, upload-time = "2026-01-20T14:03:52.478Z" }, + { url = "https://files.pythonhosted.org/packages/90/f7/1903d7703d99cf55a5f874a3b7ded2fbbc0a016fa379e7935d302869d963/pyqwest-0.3.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1eab8862232baaf756fcd3191c7eef09ad7356dbfe466e1566e02ac3e230071c", size = 4907481, upload-time = "2026-01-20T14:03:54.431Z" }, + { url = "https://files.pythonhosted.org/packages/6c/a5/1d47ed5b93920e8a58a2eb150b8da2e962cffa74ec058047aa69ff0d1acb/pyqwest-0.3.2-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a05593940ce17cae93baa107e9feaad49e04eac6a379b4b712865fe1c26c1492", size = 5261749, upload-time = "2026-01-20T14:03:56.538Z" }, + { url = "https://files.pythonhosted.org/packages/99/23/110730b498d242e1b9a9182ca5b10448c7eacf408578b46483107e701d19/pyqwest-0.3.2-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:950ffd0a077f144fd831f55b0ef6c1d89b74336eae1124bc725d3281c4257883", size = 5301391, upload-time = "2026-01-20T14:03:58.246Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/99/d9f32eb4972e36dbc602ebbf2e8f11bcea37a5b0756a7a97f958233ee464/pyqwest-0.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:040bbf9552de2bde03537eab44a8938f3016be959640f2837cd295224e345dfa", size = 5453627, upload-time = "2026-01-20T14:04:00.645Z" }, + { url = "https://files.pythonhosted.org/packages/05/16/1dad6928a1fdcb5c98741cbbd3513333d4fbf770ba3ec28b780acf4f3c37/pyqwest-0.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:fdd778092468d9301ec4afbb53e2cf8b7cc315e0db4303b76c10ca648dc66608", size = 5607643, upload-time = "2026-01-20T14:04:02.364Z" }, + { url = "https://files.pythonhosted.org/packages/98/3b/fd8846491171077fcc131e7a493f799da7f6c679f55dfe45d20f25a8a0cb/pyqwest-0.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:0709132c0db3fdea18e013f358022b5895133175012e4368c439c13e293361bd", size = 4521483, upload-time = "2026-01-20T14:04:04.03Z" }, + { url = "https://files.pythonhosted.org/packages/e2/4e/1cc68fb4c0f4a8c0a18da9da712bb66a9816f156c4fb1f0e8efea74c9753/pyqwest-0.3.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:fc516f24c930e776d89e7f6f451931bdd70eb771a5beeda5b1da8ab4b2284381", size = 4905866, upload-time = "2026-01-20T14:04:05.649Z" }, + { url = "https://files.pythonhosted.org/packages/79/bc/90bcbf64874c4c8e41fffcbe142050e7be0d62365cb9e8f5a8574718b9ed/pyqwest-0.3.2-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ebc45e788fd3fc3aa4fae8e6a9b60fab026f5378be72c417e344e6622e4b70d", size = 5266909, upload-time = "2026-01-20T14:04:07.513Z" }, + { url = "https://files.pythonhosted.org/packages/9c/30/63fa3d345dea4c850bb47bef40775e6bd27410abf15c1a22e54a5c0a55c3/pyqwest-0.3.2-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15a1c06f33d5963ed3b2d89a39099d911a7f5d4980a209f2bd95fc033b97e8df", size = 5300040, upload-time = "2026-01-20T14:04:09.308Z" }, + { url = "https://files.pythonhosted.org/packages/5c/47/cd0607f8643dd10c71f6149ef19c381219406e7df650a44781f267de0788/pyqwest-0.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:aa1bf37c67fa47f1321551ef12914c3859108a24ed6f29d34be942478069571e", size = 5455049, upload-time = "2026-01-20T14:04:11.257Z" }, + { url = "https://files.pythonhosted.org/packages/5b/fb/8ce50da0b09e33a02e3848c61cf727ba683cd148c5299e7ad6c7de464d67/pyqwest-0.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:01c784f75f8c172499f6eddf52b2ed73cfe2a46206cf914c6872231ad10e4102", size = 5608654, upload-time = "2026-01-20T14:04:13.414Z" }, + { url = "https://files.pythonhosted.org/packages/f2/d1/112a630bcae0e7539d8b5233f44ea53177b774a0cf8d72ffe8193638e0dd/pyqwest-0.3.2-cp314-cp314t-win_amd64.whl", hash = "sha256:d63c81799ba37457df9441e626651ca4e8ae411b9abe2b93ed09b7014aca9ea6", size = 4511867, upload-time = "2026-01-20T14:04:15.777Z" }, + { url = "https://files.pythonhosted.org/packages/31/2f/8859827c870621ebc7278ff4d879b13b3e549592a8741c7a68dc7ea524e6/pyqwest-0.3.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:62936eed31e6f4fc86c0ebc6de9f9b08dc1ae262abffde8fdc50aa75d9bbf829", size = 4916503, upload-time = "2026-01-20T14:04:17.527Z" }, + { url = "https://files.pythonhosted.org/packages/b1/55/2a9e08090a3d691b9f5c11fb13b30912fc94088bdec967eb75841294c303/pyqwest-0.3.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:421f3f933fb2048595b23ebc9bc6efe7f84b8cc44a8172617cf7333a3ced9565", size = 5278287, upload-time = "2026-01-20T14:04:19.461Z" }, + { url = 
"https://files.pythonhosted.org/packages/5e/5f/525626c5cb81f46cb631cb6c10911a99c133a1857f7e06b5a5e27f12f2e7/pyqwest-0.3.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcd1d4365625ed3b47e5dc4df5056af0359cddd117a94c013ebbc830736189cc", size = 5311845, upload-time = "2026-01-20T14:04:21.303Z" }, + { url = "https://files.pythonhosted.org/packages/1f/dc/15451a732ffde6348a43f46c65a66f04e41a0c4dbb8403f780e1930addce/pyqwest-0.3.2-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:ac79b0804cb276b969f23a61f1baab2d0265fbdcf0bb75d594fd5aa3a70fc3b8", size = 5468724, upload-time = "2026-01-20T14:04:23.874Z" }, + { url = "https://files.pythonhosted.org/packages/26/48/9d561081236d41b03265de36339d46065d4a98bd05edf866f8ace87d847a/pyqwest-0.3.2-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:209aa8b3035734655a66da871eaab5d75a0cde6cb80b0c34967fe93743ba234b", size = 5615617, upload-time = "2026-01-20T14:04:25.628Z" }, + { url = "https://files.pythonhosted.org/packages/b9/7c/3af27a141582c3f50c421616758fd2de648d8b9348b6cbdcaeb4f800723f/pyqwest-0.3.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:3c6839e8f795a07352f553d57b7a040eaee8b1e8884b7b7273ae8353538d0074", size = 4521476, upload-time = "2026-01-20T14:04:27.561Z" }, ] [[package]]