From 2e554627b4b6e99aef131a90c808ac8a8e6d13bf Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Sat, 24 May 2025 17:59:43 -0500
Subject: [PATCH 01/13] Increment version to 3.12.0.dev0 (#10998)
---
aiohttp/__init__.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py
index bd797bcf6ef..4bc6a3a2b22 100644
--- a/aiohttp/__init__.py
+++ b/aiohttp/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "3.12.0"
+__version__ = "3.12.0.dev0"
from typing import TYPE_CHECKING, Tuple
From ee8f1c414e39001499f03b0a64510a4ae02cbf8e Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Mon, 26 May 2025 01:15:47 -0500
Subject: [PATCH 02/13] [PR #11017/1c01726 backport][3.12] Support Reusable
Request Bodies and Improve Payload Handling (#11018)
---
CHANGES/11017.feature.rst | 3 +
CHANGES/5530.feature.rst | 1 +
CHANGES/5577.feature.rst | 1 +
CHANGES/9201.feature.rst | 1 +
CONTRIBUTORS.txt | 1 +
aiohttp/client.py | 12 +
aiohttp/client_middleware_digest_auth.py | 14 +-
aiohttp/client_reqrep.py | 194 ++++--
aiohttp/formdata.py | 5 +-
aiohttp/multipart.py | 71 ++
aiohttp/payload.py | 377 +++++++++--
aiohttp/web_response.py | 4 +
docs/client_reference.rst | 91 +++
tests/test_client_functional.py | 546 +++++++++++++++-
tests/test_client_middleware.py | 108 ++++
tests/test_client_middleware_digest_auth.py | 49 +-
tests/test_client_request.py | 478 +++++++++++++-
tests/test_client_session.py | 10 +-
tests/test_formdata.py | 176 ++++-
tests/test_multipart.py | 201 +++++-
tests/test_payload.py | 675 +++++++++++++++++++-
21 files changed, 2864 insertions(+), 154 deletions(-)
create mode 100644 CHANGES/11017.feature.rst
create mode 120000 CHANGES/5530.feature.rst
create mode 120000 CHANGES/5577.feature.rst
create mode 120000 CHANGES/9201.feature.rst
diff --git a/CHANGES/11017.feature.rst b/CHANGES/11017.feature.rst
new file mode 100644
index 00000000000..361c56e3fe8
--- /dev/null
+++ b/CHANGES/11017.feature.rst
@@ -0,0 +1,3 @@
+Added support for reusable request bodies to enable retries, redirects, and digest authentication -- by :user:`bdraco` and :user:`GLGDLY`.
+
+Most payloads can now be safely reused multiple times, fixing long-standing issues where POST requests with form data or file uploads would fail on redirects with errors like "Form data has been processed already" or "I/O operation on closed file". This also enables digest authentication to work with request bodies and allows retry mechanisms to resend requests without consuming the payload. Note that payloads derived from async iterables may still not be reusable in some cases.
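
For illustration, a minimal sketch of what this enables, assuming a server whose /upload endpoint answers with a 307 redirect (the URL and file name are placeholders):

    import asyncio

    import aiohttp

    async def main() -> None:
        async with aiohttp.ClientSession() as session:
            with open("report.csv", "rb") as f:
                # Previously a 307/308 redirect here could fail with
                # "I/O operation on closed file"; the file payload can now
                # be re-read and re-sent to the redirect target.
                async with session.post("https://example.com/upload", data=f) as resp:
                    print(resp.status)

    asyncio.run(main())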
diff --git a/CHANGES/5530.feature.rst b/CHANGES/5530.feature.rst
new file mode 120000
index 00000000000..63bf4429e55
--- /dev/null
+++ b/CHANGES/5530.feature.rst
@@ -0,0 +1 @@
+11017.feature.rst
\ No newline at end of file
diff --git a/CHANGES/5577.feature.rst b/CHANGES/5577.feature.rst
new file mode 120000
index 00000000000..63bf4429e55
--- /dev/null
+++ b/CHANGES/5577.feature.rst
@@ -0,0 +1 @@
+11017.feature.rst
\ No newline at end of file
diff --git a/CHANGES/9201.feature.rst b/CHANGES/9201.feature.rst
new file mode 120000
index 00000000000..63bf4429e55
--- /dev/null
+++ b/CHANGES/9201.feature.rst
@@ -0,0 +1 @@
+11017.feature.rst
\ No newline at end of file
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
index 59edfd7ac3f..2e2ab140122 100644
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -136,6 +136,7 @@ Frederik Gladhorn
Frederik Peter Aalund
Gabriel Tremblay
Gang Ji
+Gary Leung
Gary Wilson Jr.
Gennady Andreyev
Georges Dubus
diff --git a/aiohttp/client.py b/aiohttp/client.py
index 811c8f97588..3b2cd2796cc 100644
--- a/aiohttp/client.py
+++ b/aiohttp/client.py
@@ -792,6 +792,8 @@ async def _connect_and_send_request(
redirects += 1
history.append(resp)
if max_redirects and redirects >= max_redirects:
+ if req._body is not None:
+ await req._body.close()
resp.close()
raise TooManyRedirects(
history[0].request_info, tuple(history)
@@ -823,6 +825,9 @@ async def _connect_and_send_request(
r_url, encoded=not self._requote_redirect_url
)
except ValueError as e:
+ if req._body is not None:
+ await req._body.close()
+ resp.close()
raise InvalidUrlRedirectClientError(
r_url,
"Server attempted redirecting to a location that does not look like a URL",
@@ -830,6 +835,8 @@ async def _connect_and_send_request(
scheme = parsed_redirect_url.scheme
if scheme not in HTTP_AND_EMPTY_SCHEMA_SET:
+ if req._body is not None:
+ await req._body.close()
resp.close()
raise NonHttpUrlRedirectClientError(r_url)
elif not scheme:
@@ -838,6 +845,9 @@ async def _connect_and_send_request(
try:
redirect_origin = parsed_redirect_url.origin()
except ValueError as origin_val_err:
+ if req._body is not None:
+ await req._body.close()
+ resp.close()
raise InvalidUrlRedirectClientError(
parsed_redirect_url,
"Invalid redirect URL origin",
@@ -854,6 +864,8 @@ async def _connect_and_send_request(
break
+ if req._body is not None:
+ await req._body.close()
# check response status
if raise_for_status is None:
raise_for_status = self._raise_for_status
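
As a hedged sketch of the cleanup behavior added above (the URL is a placeholder): when the redirect limit is exhausted, the request body payload is now closed before TooManyRedirects propagates, releasing any file handles it holds:

    import asyncio

    import aiohttp

    async def main() -> None:
        async with aiohttp.ClientSession() as session:
            try:
                async with session.post(
                    "https://example.com/redirect-loop",
                    data=b"payload",
                    max_redirects=2,
                ) as resp:
                    await resp.read()
            except aiohttp.TooManyRedirects:
                pass  # the request body has already been closed at this point

    asyncio.run(main())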
diff --git a/aiohttp/client_middleware_digest_auth.py b/aiohttp/client_middleware_digest_auth.py
index b63efaf0142..9a8ffc18313 100644
--- a/aiohttp/client_middleware_digest_auth.py
+++ b/aiohttp/client_middleware_digest_auth.py
@@ -29,6 +29,7 @@
from .client_exceptions import ClientError
from .client_middlewares import ClientHandlerType
from .client_reqrep import ClientRequest, ClientResponse
+from .payload import Payload
class DigestAuthChallenge(TypedDict, total=False):
@@ -192,7 +193,7 @@ def __init__(
self._nonce_count = 0
self._challenge: DigestAuthChallenge = {}
- def _encode(self, method: str, url: URL, body: Union[bytes, str]) -> str:
+ async def _encode(self, method: str, url: URL, body: Union[bytes, Payload]) -> str:
"""
Build digest authorization header for the current challenge.
@@ -207,6 +208,7 @@ def _encode(self, method: str, url: URL, body: Union[bytes, str]) -> str:
Raises:
ClientError: If the challenge is missing required parameters or
contains unsupported values
+
"""
challenge = self._challenge
if "realm" not in challenge:
@@ -272,11 +274,11 @@ def KD(s: bytes, d: bytes) -> bytes:
A1 = b":".join((self._login_bytes, realm_bytes, self._password_bytes))
A2 = f"{method.upper()}:{path}".encode()
if qop == "auth-int":
- if isinstance(body, str):
- entity_str = body.encode("utf-8", errors="replace")
+ if isinstance(body, bytes): # will always be empty bytes unless Payload
+ entity_bytes = body
else:
- entity_str = body
- entity_hash = H(entity_str)
+ entity_bytes = await body.as_bytes() # Get bytes from Payload
+ entity_hash = H(entity_bytes)
A2 = b":".join((A2, entity_hash))
HA1 = H(A1)
@@ -398,7 +400,7 @@ async def __call__(
for retry_count in range(2):
# Apply authorization header if we have a challenge (on second attempt)
if retry_count > 0:
- request.headers[hdrs.AUTHORIZATION] = self._encode(
+ request.headers[hdrs.AUTHORIZATION] = await self._encode(
request.method, request.url, request.body
)
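
A minimal sketch of the auth-int entity hashing this change makes possible, assuming MD5 as the negotiated algorithm; the helper names are illustrative, not the middleware's internals verbatim:

    import hashlib

    def H(data: bytes) -> bytes:
        return hashlib.md5(data).hexdigest().encode()

    async def build_a2(method: str, path: str, body) -> bytes:
        # body is b"" for empty requests, or a Payload whose bytes can now
        # be obtained without consuming it, thanks to as_bytes().
        entity_bytes = body if isinstance(body, bytes) else await body.as_bytes()
        return b":".join((f"{method.upper()}:{path}".encode(), H(entity_bytes)))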
diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py
index fb83eefd51f..2322a1d7472 100644
--- a/aiohttp/client_reqrep.py
+++ b/aiohttp/client_reqrep.py
@@ -252,6 +252,25 @@ def _is_expected_content_type(
return expected_content_type in response_content_type
+def _warn_if_unclosed_payload(payload: payload.Payload, stacklevel: int = 2) -> None:
+ """Warn if the payload is not closed.
+
+ Callers must check that the body is a Payload before calling this method.
+
+ Args:
+ payload: The payload to check
+ stacklevel: Stack level for the warning (default 2 for direct callers)
+ """
+ if not payload.autoclose and not payload.consumed:
+ warnings.warn(
+ "The previous request body contains unclosed resources. "
+ "Use await request.update_body() instead of setting request.body "
+ "directly to properly close resources and avoid leaks.",
+ ResourceWarning,
+ stacklevel=stacklevel,
+ )
+
+
class ClientRequest:
GET_METHODS = {
hdrs.METH_GET,
@@ -268,7 +287,7 @@ class ClientRequest:
}
# Type of body depends on PAYLOAD_REGISTRY, which is dynamic.
- body: Any = b""
+ _body: Union[None, payload.Payload] = None
auth = None
response = None
@@ -439,6 +458,36 @@ def host(self) -> str:
def port(self) -> Optional[int]:
return self.url.port
+ @property
+ def body(self) -> Union[bytes, payload.Payload]:
+ """Request body."""
+ # empty body is represented as bytes for backwards compatibility
+ return self._body or b""
+
+ @body.setter
+ def body(self, value: Any) -> None:
+ """Set request body with warning for non-autoclose payloads.
+
+ WARNING: This setter must be called from within an event loop and is not
+ thread-safe. Setting body outside of an event loop may raise RuntimeError
+ when closing file-based payloads.
+
+ DEPRECATED: Direct assignment to body is deprecated and will be removed
+ in a future version. Use await update_body() instead for proper resource
+ management.
+ """
+ # Close existing payload if present
+ if self._body is not None:
+ # Warn if the payload needs manual closing
+ # stacklevel=3: user code -> body setter -> _warn_if_unclosed_payload
+ _warn_if_unclosed_payload(self._body, stacklevel=3)
+ # NOTE: In the future, when we remove sync close support,
+ # this setter will need to be removed and only the async
+ # update_body() method will be available. For now, we call
+ # _close() for backwards compatibility.
+ self._body._close()
+ self._update_body(value)
+
@property
def request_info(self) -> RequestInfo:
headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers)
@@ -590,9 +639,12 @@ def update_transfer_encoding(self) -> None:
)
self.headers[hdrs.TRANSFER_ENCODING] = "chunked"
- else:
- if hdrs.CONTENT_LENGTH not in self.headers:
- self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))
+ elif (
+ self._body is not None
+ and hdrs.CONTENT_LENGTH not in self.headers
+ and (size := self._body.size) is not None
+ ):
+ self.headers[hdrs.CONTENT_LENGTH] = str(size)
def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None:
"""Set basic auth."""
@@ -610,37 +662,120 @@ def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> Non
self.headers[hdrs.AUTHORIZATION] = auth.encode()
- def update_body_from_data(self, body: Any) -> None:
+ def update_body_from_data(self, body: Any, _stacklevel: int = 3) -> None:
+ """Update request body from data."""
+ if self._body is not None:
+ _warn_if_unclosed_payload(self._body, stacklevel=_stacklevel)
+
if body is None:
+ self._body = None
return
# FormData
- if isinstance(body, FormData):
- body = body()
+ maybe_payload = body() if isinstance(body, FormData) else body
try:
- body = payload.PAYLOAD_REGISTRY.get(body, disposition=None)
+ body_payload = payload.PAYLOAD_REGISTRY.get(maybe_payload, disposition=None)
except payload.LookupError:
- body = FormData(body)()
-
- self.body = body
+ body_payload = FormData(maybe_payload)() # type: ignore[arg-type]
+ self._body = body_payload
# enable chunked encoding if needed
if not self.chunked and hdrs.CONTENT_LENGTH not in self.headers:
- if (size := body.size) is not None:
+ if (size := body_payload.size) is not None:
self.headers[hdrs.CONTENT_LENGTH] = str(size)
else:
self.chunked = True
# copy payload headers
- assert body.headers
+ assert body_payload.headers
headers = self.headers
skip_headers = self._skip_auto_headers
- for key, value in body.headers.items():
+ for key, value in body_payload.headers.items():
if key in headers or (skip_headers is not None and key in skip_headers):
continue
headers[key] = value
+ def _update_body(self, body: Any) -> None:
+ """Update request body after its already been set."""
+ # Remove existing Content-Length header since body is changing
+ if hdrs.CONTENT_LENGTH in self.headers:
+ del self.headers[hdrs.CONTENT_LENGTH]
+
+ # Remove existing Transfer-Encoding header to avoid conflicts
+ if self.chunked and hdrs.TRANSFER_ENCODING in self.headers:
+ del self.headers[hdrs.TRANSFER_ENCODING]
+
+ # Now update the body using the existing method
+ # Called from _update_body, add 1 to stacklevel from caller
+ self.update_body_from_data(body, _stacklevel=4)
+
+ # Update transfer encoding headers if needed (same logic as __init__)
+ if body is not None or self.method not in self.GET_METHODS:
+ self.update_transfer_encoding()
+
+ async def update_body(self, body: Any) -> None:
+ """
+ Update request body and close previous payload if needed.
+
+ This method safely updates the request body by first closing any existing
+ payload to prevent resource leaks, then setting the new body.
+
+ IMPORTANT: Always use this method instead of setting request.body directly.
+ Direct assignment to request.body will leak resources if the previous body
+ contains file handles, streams, or other resources that need cleanup.
+
+ Args:
+ body: The new body content. Can be:
+ - bytes/bytearray: Raw binary data
+ - str: Text data (will be encoded using charset from Content-Type)
+ - FormData: Form data that will be encoded as multipart/form-data
+ - Payload: A pre-configured payload object
+ - AsyncIterable: An async iterable of bytes chunks
+ - File-like object: Will be read and sent as binary data
+ - None: Clears the body
+
+ Usage:
+ # CORRECT: Use update_body
+ await request.update_body(b"new request data")
+
+ # WRONG: Don't set body directly
+ # request.body = b"new request data" # This will leak resources!
+
+ # Update with form data
+ form_data = FormData()
+ form_data.add_field('field', 'value')
+ await request.update_body(form_data)
+
+ # Clear body
+ await request.update_body(None)
+
+ Note:
+ This method is async because it may need to close file handles or
+ other resources associated with the previous payload. Always await
+ this method to ensure proper cleanup.
+
+ Warning:
+ Setting request.body directly is highly discouraged and can lead to:
+ - Resource leaks (unclosed file handles, streams)
+ - Memory leaks (unreleased buffers)
+ - Unexpected behavior with streaming payloads
+
+ It is not recommended to change the payload type in middleware. If the
+ body was already set (e.g., as bytes), it's best to keep the same type
+ rather than converting it (e.g., to str) as this may result in unexpected
+ behavior.
+
+ See Also:
+ - update_body_from_data: Synchronous body update without cleanup
+ - body property: Direct body access (STRONGLY DISCOURAGED)
+
+ """
+ # Close existing payload if it exists and needs closing
+ if self._body is not None:
+ await self._body.close()
+ self._update_body(body)
+
def update_expect_continue(self, expect: bool = False) -> None:
if expect:
self.headers[hdrs.EXPECT] = "100-continue"
@@ -717,27 +852,14 @@ async def write_bytes(
protocol = conn.protocol
assert protocol is not None
try:
- if isinstance(self.body, payload.Payload):
- # Specialized handling for Payload objects that know how to write themselves
- await self.body.write_with_length(writer, content_length)
- else:
- # Handle bytes/bytearray by converting to an iterable for consistent handling
- if isinstance(self.body, (bytes, bytearray)):
- self.body = (self.body,)
-
- if content_length is None:
- # Write the entire body without length constraint
- for chunk in self.body:
- await writer.write(chunk)
- else:
- # Write with length constraint, respecting content_length limit
- # If the body is larger than content_length, we truncate it
- remaining_bytes = content_length
- for chunk in self.body:
- await writer.write(chunk[:remaining_bytes])
- remaining_bytes -= len(chunk)
- if remaining_bytes <= 0:
- break
+ # This should be a rare case: self._body can be set
+ # to None while the task is being started or while we
+ # wait above for the 100-continue response.
+ # The more likely case is an empty payload with
+ # 100-continue still expected.
+ if self._body is not None:
+ await self._body.write_with_length(writer, content_length)
except OSError as underlying_exc:
reraised_exc = underlying_exc
@@ -833,7 +955,7 @@ async def send(self, conn: "Connection") -> "ClientResponse":
await writer.write_headers(status_line, self.headers)
task: Optional["asyncio.Task[None]"]
- if self.body or self._continue is not None or protocol.writing_paused:
+ if self._body or self._continue is not None or protocol.writing_paused:
coro = self.write_bytes(writer, conn, self._get_content_length())
if sys.version_info >= (3, 12):
# Optimization for Python 3.12, try to write
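
A hedged sketch of the new setter semantics, assuming `request` is a ClientRequest whose current payload is file-backed (setup elided); the warning fires only for payloads that are neither autoclose nor already consumed:

    import warnings

    from aiohttp.client_reqrep import ClientRequest

    async def replace_body(request: ClientRequest) -> None:
        with warnings.catch_warnings(record=True) as caught:
            warnings.simplefilter("always")
            request.body = b"replacement"  # direct assignment: discouraged
            assert any(issubclass(w.category, ResourceWarning) for w in caught)
        # The supported path closes the previous payload before swapping:
        await request.update_body(b"replacement")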
diff --git a/aiohttp/formdata.py b/aiohttp/formdata.py
index 73056f4bc45..bdf591fae7a 100644
--- a/aiohttp/formdata.py
+++ b/aiohttp/formdata.py
@@ -29,7 +29,6 @@ def __init__(
self._writer = multipart.MultipartWriter("form-data")
self._fields: List[Any] = []
self._is_multipart = default_to_multipart
- self._is_processed = False
self._quote_fields = quote_fields
self._charset = charset
@@ -140,8 +139,6 @@ def _gen_form_urlencoded(self) -> payload.BytesPayload:
def _gen_form_data(self) -> multipart.MultipartWriter:
"""Encode a list of fields using the multipart/form-data MIME format"""
- if self._is_processed:
- raise RuntimeError("Form data has been processed already")
for dispparams, headers, value in self._fields:
try:
if hdrs.CONTENT_TYPE in headers:
@@ -172,7 +169,7 @@ def _gen_form_data(self) -> multipart.MultipartWriter:
self._writer.append_payload(part)
- self._is_processed = True
+ self._fields.clear()
return self._writer
def __call__(self) -> Payload:
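
A plausible sketch of the reuse this unlocks; the second call previously raised RuntimeError("Form data has been processed already"):

    from aiohttp import FormData

    form = FormData()
    form.add_field("file", b"contents", filename="a.txt")  # forces multipart

    payload_1 = form()  # drains the pending fields into the multipart writer
    payload_2 = form()  # no longer raises; the field list was cleared, not flagged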
diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py
index 459cc321a1d..231c67c7bb7 100644
--- a/aiohttp/multipart.py
+++ b/aiohttp/multipart.py
@@ -36,6 +36,7 @@
)
from .helpers import CHAR, TOKEN, parse_mimetype, reify
from .http import HeadersParser
+from .log import internal_logger
from .payload import (
JsonPayload,
LookupError,
@@ -559,6 +560,7 @@ def filename(self) -> Optional[str]:
@payload_type(BodyPartReader, order=Order.try_first)
class BodyPartReaderPayload(Payload):
_value: BodyPartReader
+ # _autoclose = False (inherited) - Streaming reader that may have resources
def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None:
super().__init__(value, *args, **kwargs)
@@ -575,6 +577,16 @@ def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None:
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
raise TypeError("Unable to decode.")
+ async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes:
+ """Raises TypeError as body parts should be consumed via write().
+
+ This is intentional: BodyPartReader payloads are designed for streaming
+ large data (potentially gigabytes) and must be consumed only once via
+ the write() method to avoid memory exhaustion. They cannot be buffered
+ in memory for reuse.
+ """
+ raise TypeError("Unable to read body part as bytes. Use write() to consume.")
+
async def write(self, writer: Any) -> None:
field = self._value
chunk = await field.read_chunk(size=2**16)
@@ -793,6 +805,8 @@ class MultipartWriter(Payload):
"""Multipart body writer."""
_value: None
+ # _consumed = False (inherited) - Can be encoded multiple times
+ _autoclose = True # No file handles, just collects parts in memory
def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None:
boundary = boundary if boundary is not None else uuid.uuid4().hex
@@ -975,6 +989,11 @@ def size(self) -> Optional[int]:
return total
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
+ """Return string representation of the multipart data.
+
+ WARNING: This method may do blocking I/O if parts contain file payloads.
+ It should not be called in the event loop. Use (await as_bytes()).decode() instead.
+ """
return "".join(
"--"
+ self.boundary
@@ -984,6 +1003,33 @@ def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
for part, _e, _te in self._parts
)
+ async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes:
+ """Return bytes representation of the multipart data.
+
+ This method is async-safe and calls as_bytes on underlying payloads.
+ """
+ parts: List[bytes] = []
+
+ # Process each part
+ for part, _e, _te in self._parts:
+ # Add boundary
+ parts.append(b"--" + self._boundary + b"\r\n")
+
+ # Add headers
+ parts.append(part._binary_headers)
+
+ # Add payload content using as_bytes for async safety
+ part_bytes = await part.as_bytes(encoding, errors)
+ parts.append(part_bytes)
+
+ # Add trailing CRLF
+ parts.append(b"\r\n")
+
+ # Add closing boundary
+ parts.append(b"--" + self._boundary + b"--\r\n")
+
+ return b"".join(parts)
+
async def write(self, writer: Any, close_boundary: bool = True) -> None:
"""Write body."""
for part, encoding, te_encoding in self._parts:
@@ -1011,6 +1057,31 @@ async def write(self, writer: Any, close_boundary: bool = True) -> None:
if close_boundary:
await writer.write(b"--" + self._boundary + b"--\r\n")
+ async def close(self) -> None:
+ """
+ Close all part payloads that need explicit closing.
+
+ IMPORTANT: This method must not await anything that might not finish
+ immediately, as it may be called during cleanup/cancellation. Schedule
+ any long-running operations without awaiting them.
+ """
+ if self._consumed:
+ return
+ self._consumed = True
+
+ # Close all parts that need explicit closing
+ # We catch and log exceptions to ensure all parts get a chance to close.
+ # We do not use asyncio.gather() here because we are not allowed
+ # to suspend, given we may be called during cleanup.
+ for idx, (part, _, _) in enumerate(self._parts):
+ if not part.autoclose and not part.consumed:
+ try:
+ await part.close()
+ except Exception as exc:
+ internal_logger.error(
+ "Failed to close multipart part %d: %s", idx, exc, exc_info=True
+ )
+
class MultipartPayloadWriter:
def __init__(self, writer: Any) -> None:
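
A minimal sketch of the async-safe rendering path added above, using an in-memory part and a fixed boundary for determinism:

    import asyncio

    from aiohttp import MultipartWriter

    async def main() -> None:
        with MultipartWriter("form-data", boundary="example-boundary") as mp:
            mp.append("hello")
        # Unlike decode(), as_bytes() defers to each part's own as_bytes(),
        # so file-backed parts are read without blocking the event loop.
        body = await mp.as_bytes()
        assert body.startswith(b"--example-boundary\r\n")

    asyncio.run(main())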
diff --git a/aiohttp/payload.py b/aiohttp/payload.py
index c954091adad..4a2c7922337 100644
--- a/aiohttp/payload.py
+++ b/aiohttp/payload.py
@@ -15,6 +15,7 @@
Dict,
Final,
Iterable,
+ List,
Optional,
Set,
TextIO,
@@ -58,12 +59,8 @@
_CLOSE_FUTURES: Set[asyncio.Future[None]] = set()
-if TYPE_CHECKING:
- from typing import List
-
-
class LookupError(Exception):
- pass
+ """Raised when no payload factory is found for the given data type."""
class Order(str, enum.Enum):
@@ -155,6 +152,8 @@ class Payload(ABC):
_default_content_type: str = "application/octet-stream"
_size: Optional[int] = None
+ _consumed: bool = False # Default: payload has not been consumed yet
+ _autoclose: bool = False # Default: assume resource needs explicit closing
def __init__(
self,
@@ -189,7 +188,12 @@ def __init__(
@property
def size(self) -> Optional[int]:
- """Size of the payload."""
+ """Size of the payload in bytes.
+
+ Returns the number of bytes that will be transmitted when the payload
+ is written. For string payloads, this is the size after encoding to bytes,
+ not the length of the string.
+ """
return self._size
@property
@@ -221,6 +225,21 @@ def content_type(self) -> str:
"""Content type"""
return self._headers[hdrs.CONTENT_TYPE]
+ @property
+ def consumed(self) -> bool:
+ """Whether the payload has been consumed and cannot be reused."""
+ return self._consumed
+
+ @property
+ def autoclose(self) -> bool:
+ """
+ Whether the payload can close itself automatically.
+
+ Returns True if the payload has no file handles or resources that need
+ explicit closing. If False, callers must await close() to release resources.
+ """
+ return self._autoclose
+
def set_content_disposition(
self,
disptype: str,
@@ -235,14 +254,16 @@ def set_content_disposition(
@abstractmethod
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
- """Return string representation of the value.
+ """
+ Return string representation of the value.
This is named decode() to allow compatibility with bytes objects.
"""
@abstractmethod
async def write(self, writer: AbstractStreamWriter) -> None:
- """Write payload to the writer stream.
+ """
+ Write payload to the writer stream.
Args:
writer: An AbstractStreamWriter instance that handles the actual writing
@@ -256,6 +277,7 @@ async def write(self, writer: AbstractStreamWriter) -> None:
All payload subclasses must override this method for backwards compatibility,
but new code should use write_with_length for more flexibility and control.
+
"""
# write_with_length is new in aiohttp 3.12
@@ -283,9 +305,52 @@ async def write_with_length(
# and for the default implementation
await self.write(writer)
+ async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes:
+ """
+ Return bytes representation of the value.
+
+ This is a convenience method that calls decode() and encodes the result
+ to bytes using the specified encoding.
+ """
+ # Use instance encoding if available, otherwise use parameter
+ actual_encoding = self._encoding or encoding
+ return self.decode(actual_encoding, errors).encode(actual_encoding)
+
+ def _close(self) -> None:
+ """
+ Async-safe synchronous close operations for backwards compatibility.
+
+ This method exists only for backwards compatibility with code that
+ needs to clean up payloads synchronously. In the future, we will
+ drop this method and only support the async close() method.
+
+ WARNING: This method must be safe to call from within the event loop
+ without blocking. Subclasses should not perform any blocking I/O here.
+
+ WARNING: This method must be called from within an event loop for
+ certain payload types (e.g., IOBasePayload). Calling it outside an
+ event loop may raise RuntimeError.
+ """
+ # This is a no-op by default, but subclasses can override it
+ # for non-blocking cleanup operations.
+
+ async def close(self) -> None:
+ """
+ Close the payload if it holds any resources.
+
+ IMPORTANT: This method must not await anything that might not finish
+ immediately, as it may be called during cleanup/cancellation. Schedule
+ any long-running operations without awaiting them.
+
+ In the future, this will be the only close method supported.
+ """
+ self._close()
+
class BytesPayload(Payload):
_value: bytes
+ # _consumed = False (inherited) - Bytes are immutable and can be reused
+ _autoclose = True # No file handle, just bytes in memory
def __init__(
self, value: Union[bytes, bytearray, memoryview], *args: Any, **kwargs: Any
@@ -315,8 +380,18 @@ def __init__(
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
return self._value.decode(encoding, errors)
+ async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes:
+ """
+ Return bytes representation of the value.
+
+ This method returns the raw bytes content of the payload.
+ It is equivalent to accessing the _value attribute directly.
+ """
+ return self._value
+
async def write(self, writer: AbstractStreamWriter) -> None:
- """Write the entire bytes payload to the writer stream.
+ """
+ Write the entire bytes payload to the writer stream.
Args:
writer: An AbstractStreamWriter instance that handles the actual writing
@@ -327,6 +402,7 @@ async def write(self, writer: AbstractStreamWriter) -> None:
For new implementations that need length control, use write_with_length().
This method is maintained for backwards compatibility and is equivalent
to write_with_length(writer, None).
+
"""
await writer.write(self._value)
@@ -389,6 +465,9 @@ def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None:
class IOBasePayload(Payload):
_value: io.IOBase
+ # _consumed = False (inherited) - File can be re-read from the same position
+ _start_position: Optional[int] = None
+ # _autoclose = False (inherited) - Has file handle that needs explicit closing
def __init__(
self, value: IO[Any], disposition: str = "attachment", *args: Any, **kwargs: Any
@@ -402,6 +481,16 @@ def __init__(
if hdrs.CONTENT_DISPOSITION not in self.headers:
self.set_content_disposition(disposition, filename=self._filename)
+ def _set_or_restore_start_position(self) -> None:
+ """Set or restore the start position of the file-like object."""
+ if self._start_position is None:
+ try:
+ self._start_position = self._value.tell()
+ except OSError:
+ self._consumed = True # Cannot seek, mark as consumed
+ return
+ self._value.seek(self._start_position)
+
def _read_and_available_len(
self, remaining_content_len: Optional[int]
) -> Tuple[Optional[int], bytes]:
@@ -422,6 +511,7 @@ def _read_and_available_len(
context switches and file operations when streaming content.
"""
+ self._set_or_restore_start_position()
size = self.size # Call size only once since it does I/O
return size, self._value.read(
min(size or READ_SIZE, remaining_content_len or READ_SIZE)
@@ -447,6 +537,12 @@ def _read(self, remaining_content_len: Optional[int]) -> bytes:
@property
def size(self) -> Optional[int]:
+ """
+ Size of the payload in bytes.
+
+ Returns the number of bytes remaining to be read from the file.
+ Returns None if the size cannot be determined (e.g., for unseekable streams).
+ """
try:
return os.fstat(self._value.fileno()).st_size - self._value.tell()
except (AttributeError, OSError):
@@ -497,38 +593,31 @@ async def write_with_length(
total_written_len = 0
remaining_content_len = content_length
- try:
- # Get initial data and available length
- available_len, chunk = await loop.run_in_executor(
- None, self._read_and_available_len, remaining_content_len
- )
- # Process data chunks until done
- while chunk:
- chunk_len = len(chunk)
+ # Get initial data and available length
+ available_len, chunk = await loop.run_in_executor(
+ None, self._read_and_available_len, remaining_content_len
+ )
+ # Process data chunks until done
+ while chunk:
+ chunk_len = len(chunk)
- # Write data with or without length constraint
- if remaining_content_len is None:
- await writer.write(chunk)
- else:
- await writer.write(chunk[:remaining_content_len])
- remaining_content_len -= chunk_len
+ # Write data with or without length constraint
+ if remaining_content_len is None:
+ await writer.write(chunk)
+ else:
+ await writer.write(chunk[:remaining_content_len])
+ remaining_content_len -= chunk_len
- total_written_len += chunk_len
+ total_written_len += chunk_len
- # Check if we're done writing
- if self._should_stop_writing(
- available_len, total_written_len, remaining_content_len
- ):
- return
+ # Check if we're done writing
+ if self._should_stop_writing(
+ available_len, total_written_len, remaining_content_len
+ ):
+ return
- # Read next chunk
- chunk = await loop.run_in_executor(
- None, self._read, remaining_content_len
- )
- finally:
- # Handle closing the file without awaiting to prevent cancellation issues
- # when the StreamReader reaches EOF
- self._schedule_file_close(loop)
+ # Read next chunk
+ chunk = await loop.run_in_executor(None, self._read, remaining_content_len)
def _should_stop_writing(
self,
@@ -554,20 +643,67 @@ def _should_stop_writing(
remaining_content_len is not None and remaining_content_len <= 0
)
- def _schedule_file_close(self, loop: asyncio.AbstractEventLoop) -> None:
- """Schedule file closing without awaiting to prevent cancellation issues."""
+ def _close(self) -> None:
+ """
+ Async-safe synchronous close operations for backwards compatibility.
+
+ This method exists only for backwards
+ compatibility. Use the async close() method instead.
+
+ WARNING: This method MUST be called from within an event loop.
+ Calling it outside an event loop will raise RuntimeError.
+ """
+ # Skip if already consumed
+ if self._consumed:
+ return
+ self._consumed = True # Mark as consumed to prevent further writes
+ # Schedule file closing without awaiting to prevent cancellation issues
+ loop = asyncio.get_running_loop()
close_future = loop.run_in_executor(None, self._value.close)
# Hold a strong reference to the future to prevent it from being
# garbage collected before it completes.
_CLOSE_FUTURES.add(close_future)
close_future.add_done_callback(_CLOSE_FUTURES.remove)
+ async def close(self) -> None:
+ """
+ Close the payload if it holds any resources.
+
+ IMPORTANT: This method must not await anything that might not finish
+ immediately, as it may be called during cleanup/cancellation. Schedule
+ any long-running operations without awaiting them.
+ """
+ self._close()
+
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
- return "".join(r.decode(encoding, errors) for r in self._value.readlines())
+ """
+ Return string representation of the value.
+
+ WARNING: This method does blocking I/O and should not be called in the event loop.
+ """
+ return self._read_all().decode(encoding, errors)
+
+ def _read_all(self) -> bytes:
+ """Read the entire file-like object and return its content as bytes."""
+ self._set_or_restore_start_position()
+ # Use readlines() to ensure we get all content
+ return b"".join(self._value.readlines())
+
+ async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes:
+ """
+ Return bytes representation of the value.
+
+ This method reads the entire file content and returns it as bytes.
+ It is equivalent to reading the file-like object directly.
+ The file reading is performed in an executor to avoid blocking the event loop.
+ """
+ loop = asyncio.get_running_loop()
+ return await loop.run_in_executor(None, self._read_all)
class TextIOPayload(IOBasePayload):
_value: io.TextIOBase
+ # _autoclose = False (inherited) - Has text file handle that needs explicit closing
def __init__(
self,
@@ -621,6 +757,7 @@ def _read_and_available_len(
to the stream. If no encoding is specified, UTF-8 is used as the default.
"""
+ self._set_or_restore_start_position()
size = self.size
chunk = self._value.read(
min(size or READ_SIZE, remaining_content_len or READ_SIZE)
@@ -649,20 +786,56 @@ def _read(self, remaining_content_len: Optional[int]) -> bytes:
return chunk.encode(self._encoding) if self._encoding else chunk.encode()
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
+ """
+ Return string representation of the value.
+
+ WARNING: This method does blocking I/O and should not be called in the event loop.
+ """
+ self._set_or_restore_start_position()
return self._value.read()
+ async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes:
+ """
+ Return bytes representation of the value.
+
+ This method reads the entire text file content and returns it as bytes.
+ It encodes the text content using the specified encoding.
+ The file reading is performed in an executor to avoid blocking the event loop.
+ """
+ loop = asyncio.get_running_loop()
+
+ # Use instance encoding if available, otherwise use parameter
+ actual_encoding = self._encoding or encoding
+
+ def _read_and_encode() -> bytes:
+ self._set_or_restore_start_position()
+ # TextIO read() always returns the full content
+ return self._value.read().encode(actual_encoding, errors)
+
+ return await loop.run_in_executor(None, _read_and_encode)
+
class BytesIOPayload(IOBasePayload):
_value: io.BytesIO
+ _size: int # Always initialized in __init__
+ _autoclose = True # BytesIO is in-memory, safe to auto-close
+
+ def __init__(self, value: io.BytesIO, *args: Any, **kwargs: Any) -> None:
+ super().__init__(value, *args, **kwargs)
+ # Calculate size once during initialization
+ self._size = len(self._value.getbuffer()) - self._value.tell()
@property
def size(self) -> int:
- position = self._value.tell()
- end = self._value.seek(0, os.SEEK_END)
- self._value.seek(position)
- return end - position
+ """Size of the payload in bytes.
+
+ Returns the number of bytes in the BytesIO buffer that will be transmitted.
+ This is calculated once during initialization for efficiency.
+ """
+ return self._size
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
+ self._set_or_restore_start_position()
return self._value.read().decode(encoding, errors)
async def write(self, writer: AbstractStreamWriter) -> None:
@@ -690,32 +863,49 @@ async def write_with_length(
responsiveness when processing large in-memory buffers.
"""
+ self._set_or_restore_start_position()
loop_count = 0
remaining_bytes = content_length
- try:
- while chunk := self._value.read(READ_SIZE):
- if loop_count > 0:
- # Avoid blocking the event loop
- # if they pass a large BytesIO object
- # and we are not in the first iteration
- # of the loop
- await asyncio.sleep(0)
- if remaining_bytes is None:
- await writer.write(chunk)
- else:
- await writer.write(chunk[:remaining_bytes])
- remaining_bytes -= len(chunk)
- if remaining_bytes <= 0:
- return
- loop_count += 1
- finally:
- self._value.close()
+ while chunk := self._value.read(READ_SIZE):
+ if loop_count > 0:
+ # Avoid blocking the event loop
+ # if they pass a large BytesIO object
+ # and we are not in the first iteration
+ # of the loop
+ await asyncio.sleep(0)
+ if remaining_bytes is None:
+ await writer.write(chunk)
+ else:
+ await writer.write(chunk[:remaining_bytes])
+ remaining_bytes -= len(chunk)
+ if remaining_bytes <= 0:
+ return
+ loop_count += 1
+
+ async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes:
+ """
+ Return bytes representation of the value.
+
+ This method reads the entire BytesIO content and returns it as bytes.
+ The read starts from the payload's recorded start position, so it can be called repeatedly.
+ """
+ self._set_or_restore_start_position()
+ return self._value.read()
+
+ async def close(self) -> None:
+ """
+ Close the BytesIO payload.
+
+ This does nothing since BytesIO is in-memory and does not require explicit closing.
+ """
class BufferedReaderPayload(IOBasePayload):
_value: io.BufferedIOBase
+ # _autoclose = False (inherited) - Has buffered file handle that needs explicit closing
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
+ self._set_or_restore_start_position()
return self._value.read().decode(encoding, errors)
@@ -755,6 +945,9 @@ class AsyncIterablePayload(Payload):
_iter: Optional[_AsyncIterator] = None
_value: _AsyncIterable
+ _cached_chunks: Optional[List[bytes]] = None
+ # _consumed stays False to allow reuse with cached content
+ _autoclose = True # Iterator doesn't need explicit closing
def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None:
if not isinstance(value, AsyncIterable):
@@ -800,17 +993,30 @@ async def write_with_length(
This implementation handles streaming of async iterable content with length constraints:
- 1. Iterates through the async iterable one chunk at a time
- 2. Respects content_length constraints when specified
- 3. Handles the case when the iterable might be used twice
-
- Since async iterables are consumed as they're iterated, there is no way to
- restart the iteration if it's already in progress or completed.
+ 1. If cached chunks are available, writes from them
+ 2. Otherwise iterates through the async iterable one chunk at a time
+ 3. Respects content_length constraints when specified
+ 4. Does NOT generate cache - that's done by as_bytes()
"""
+ # If we have cached chunks, use them
+ if self._cached_chunks is not None:
+ remaining_bytes = content_length
+ for chunk in self._cached_chunks:
+ if remaining_bytes is None:
+ await writer.write(chunk)
+ elif remaining_bytes > 0:
+ await writer.write(chunk[:remaining_bytes])
+ remaining_bytes -= len(chunk)
+ else:
+ break
+ return
+
+ # If iterator is exhausted and we don't have cached chunks, nothing to write
if self._iter is None:
return
+ # Stream from the iterator
remaining_bytes = content_length
try:
@@ -832,9 +1038,40 @@ async def write_with_length(
except StopAsyncIteration:
# Iterator is exhausted
self._iter = None
+ self._consumed = True # Mark as consumed when streamed without caching
def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
- raise TypeError("Unable to decode.")
+ """Decode the payload content as a string if cached chunks are available."""
+ if self._cached_chunks is not None:
+ return b"".join(self._cached_chunks).decode(encoding, errors)
+ raise TypeError("Unable to decode - content not cached. Call as_bytes() first.")
+
+ async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> bytes:
+ """
+ Return bytes representation of the value.
+
+ This method reads the entire async iterable content and returns it as bytes.
+ It generates and caches the chunks for future reuse.
+ """
+ # If we have cached chunks, return them joined
+ if self._cached_chunks is not None:
+ return b"".join(self._cached_chunks)
+
+ # If iterator is exhausted and no cache, return empty
+ if self._iter is None:
+ return b""
+
+ # Read all chunks and cache them
+ chunks: List[bytes] = []
+ async for chunk in self._iter:
+ chunks.append(chunk)
+
+ # Iterator is exhausted, cache the chunks
+ self._iter = None
+ self._cached_chunks = chunks
+ # Keep _consumed as False to allow reuse with cached chunks
+
+ return b"".join(chunks)
class StreamReaderPayload(AsyncIterablePayload):
@@ -852,5 +1089,5 @@ def __init__(self, value: StreamReader, *args: Any, **kwargs: Any) -> None:
PAYLOAD_REGISTRY.register(IOBasePayload, io.IOBase)
PAYLOAD_REGISTRY.register(StreamReaderPayload, StreamReader)
# try_last for giving a chance to more specialized async iterables like
-# multidict.BodyPartReaderPayload override the default
+# multipart.BodyPartReaderPayload override the default
PAYLOAD_REGISTRY.register(AsyncIterablePayload, AsyncIterable, order=Order.try_last)
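
A short sketch of the caching contract described above: as_bytes() drains the iterator once and serves later calls from the cache, while plain streaming (writing without a prior as_bytes()) still marks the payload consumed:

    import asyncio

    from aiohttp.payload import AsyncIterablePayload

    async def gen():
        yield b"chunk1"
        yield b"chunk2"

    async def main() -> None:
        p = AsyncIterablePayload(gen())
        first = await p.as_bytes()   # exhausts the iterator, caches the chunks
        second = await p.as_bytes()  # served from _cached_chunks
        assert first == second == b"chunk1chunk2"

    asyncio.run(main())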
diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py
index 84ad18e8b4f..cdc90cc4f1c 100644
--- a/aiohttp/web_response.py
+++ b/aiohttp/web_response.py
@@ -724,6 +724,9 @@ def body(self, body: Any) -> None:
def text(self) -> Optional[str]:
if self._body is None:
return None
+ # Note: When _body is a Payload (e.g. FilePayload), this may do blocking I/O
+ # This is generally safe as most common payloads (BytesPayload, StringPayload)
+ # don't do blocking I/O, but be careful with file-based payloads
return self._body.decode(self.charset or "utf-8")
@text.setter
@@ -777,6 +780,7 @@ async def write_eof(self, data: bytes = b"") -> None:
await super().write_eof()
elif isinstance(self._body, Payload):
await self._body.write(self._payload_writer)
+ await self._body.close()
await super().write_eof()
else:
await super().write_eof(cast(bytes, body))
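
A hedged illustration of the server-side effect (the path is a placeholder): a file-backed response body is now closed by write_eof() once the body has been sent:

    from aiohttp import web

    async def handler(request: web.Request) -> web.Response:
        # The IOBasePayload wrapping this file is closed after the body
        # is written, so the handle is not leaked.
        return web.Response(body=open("static/report.pdf", "rb"))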
diff --git a/docs/client_reference.rst b/docs/client_reference.rst
index 606df6acc0a..d3c2226aee0 100644
--- a/docs/client_reference.rst
+++ b/docs/client_reference.rst
@@ -1889,6 +1889,26 @@ ClientRequest
- A :class:`Payload` object for raw data (default is empty bytes ``b""``)
- A :class:`FormData` object for form submissions
+ .. danger::
+
+ **DO NOT set this attribute directly!** Direct assignment will cause resource
+ leaks. Always use :meth:`update_body` instead:
+
+ .. code-block:: python
+
+ # WRONG - This will leak resources!
+ request.body = b"new data"
+
+ # CORRECT - Use update_body
+ await request.update_body(b"new data")
+
+ Setting body directly bypasses cleanup of the previous payload, which can
+ leave file handles open, streams unclosed, and buffers unreleased.
+
+ Additionally, setting body directly must be done from within an event loop
+ and is not thread-safe. Setting body outside of an event loop may raise
+ RuntimeError when closing file-based payloads.
+
.. attribute:: chunked
:type: bool | None
@@ -1990,6 +2010,77 @@ ClientRequest
The HTTP version to use for the request (e.g., ``HttpVersion(1, 1)`` for HTTP/1.1).
+ .. method:: update_body(body)
+
+ Update the request body and close any existing payload to prevent resource leaks.
+
+ **This is the ONLY correct way to modify a request body.** Never set the
+ :attr:`body` attribute directly.
+
+ This method is particularly useful in middleware when you need to modify the
+ request body after the request has been created but before it's sent.
+
+ :param body: The new body content. Can be:
+
+ - ``bytes``/``bytearray``: Raw binary data
+ - ``str``: Text data (encoded using charset from Content-Type)
+ - :class:`FormData`: Form data encoded as multipart/form-data
+ - :class:`Payload`: A pre-configured payload object
+ - ``AsyncIterable[bytes]``: Async iterable of bytes chunks
+ - File-like object: Will be read and sent as binary data
+ - ``None``: Clears the body
+
+ .. code-block:: python
+
+ async def middleware(request, handler):
+ # Modify request body in middleware
+ if request.method == 'POST':
+ # CORRECT: Always use update_body
+ await request.update_body(b'{"modified": true}')
+
+ # WRONG: Never set body directly!
+ # request.body = b'{"modified": true}' # This leaks resources!
+
+ # Or add authentication data to form
+ if isinstance(request.body, FormData):
+ form = FormData()
+ # Copy existing fields and add auth token
+ form.add_field('auth_token', 'secret123')
+ await request.update_body(form)
+
+ return await handler(request)
+
+ .. note::
+
+ This method is async because it may need to close file handles or
+ other resources associated with the previous payload. Always await
+ this method to ensure proper cleanup.
+
+ .. danger::
+
+ **Never set :attr:`ClientRequest.body` directly!** Direct assignment will cause resource
+ leaks. Always use this method instead. Setting the body attribute directly:
+
+ - Bypasses cleanup of the previous payload
+ - Leaves file handles and streams open
+ - Can cause memory leaks
+ - May result in unexpected behavior with async iterables
+
+ .. warning::
+
+ When updating the body, ensure that the Content-Type header is
+ appropriate for the new body content. The Content-Length header
+ will be updated automatically. When using :class:`FormData` or
+ :class:`Payload` objects, headers are updated automatically,
+ but you may need to set Content-Type manually for raw bytes or text.
+
+ It is not recommended to change the payload type in middleware. If the
+ body was already set (e.g., as bytes), it's best to keep the same type
+ rather than converting it (e.g., to str) as this may result in unexpected
+ behavior.
+
+ .. versionadded:: 3.12
+
Utilities
diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py
index 29838c39a71..cb4edd3d1e1 100644
--- a/tests/test_client_functional.py
+++ b/tests/test_client_functional.py
@@ -30,7 +30,7 @@
from yarl import URL
import aiohttp
-from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, web
+from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, payload, web
from aiohttp.abc import AbstractResolver, ResolveResult
from aiohttp.client_exceptions import (
ClientResponseError,
@@ -45,6 +45,14 @@
from aiohttp.client_reqrep import ClientRequest
from aiohttp.connector import Connection
from aiohttp.http_writer import StreamWriter
+from aiohttp.payload import (
+ AsyncIterablePayload,
+ BufferedReaderPayload,
+ BytesIOPayload,
+ BytesPayload,
+ StringIOPayload,
+ StringPayload,
+)
from aiohttp.pytest_plugin import AiohttpClient, AiohttpServer
from aiohttp.test_utils import TestClient, TestServer, unused_port
from aiohttp.typedefs import Handler
@@ -600,6 +608,61 @@ async def handler(request: web.Request) -> web.Response:
assert 200 == resp.status
+async def test_post_bytes_data_content_length_from_body(
+ aiohttp_client: AiohttpClient,
+) -> None:
+ """Test that Content-Length is set from body payload size when sending bytes."""
+ data = b"test payload data"
+
+ async def handler(request: web.Request) -> web.Response:
+ # Verify Content-Length header was set correctly
+ assert request.content_length == len(data)
+ assert request.headers.get("Content-Length") == str(len(data))
+
+ # Verify we can read the data
+ val = await request.read()
+ assert data == val
+ return web.Response()
+
+ app = web.Application()
+ app.router.add_route("POST", "/", handler)
+ client = await aiohttp_client(app)
+
+ # Send bytes data - this should trigger the code path where
+ # Content-Length is set from body.size in update_transfer_encoding
+ async with client.post("/", data=data) as resp:
+ assert resp.status == 200
+
+
+async def test_post_custom_payload_without_content_length(
+ aiohttp_client: AiohttpClient,
+) -> None:
+ """Test that Content-Length is set from payload.size when not explicitly provided."""
+ data = b"custom payload data"
+
+ async def handler(request: web.Request) -> web.Response:
+ # Verify Content-Length header was set from payload size
+ assert request.content_length == len(data)
+ assert request.headers.get("Content-Length") == str(len(data))
+
+ # Verify we can read the data
+ val = await request.read()
+ assert data == val
+ return web.Response()
+
+ app = web.Application()
+ app.router.add_route("POST", "/", handler)
+ client = await aiohttp_client(app)
+
+ # Create a BytesPayload directly - this ensures we test the path
+ # where update_transfer_encoding sets Content-Length from body.size
+ bytes_payload = payload.BytesPayload(data)
+
+ # Don't set Content-Length header explicitly
+ async with client.post("/", data=bytes_payload) as resp:
+ assert resp.status == 200
+
+
async def test_ssl_client(
aiohttp_server,
ssl_ctx,
@@ -2111,6 +2174,51 @@ async def expect_handler(request):
assert expect_called
+async def test_expect100_with_no_body(aiohttp_client: AiohttpClient) -> None:
+ """Test expect100 with GET request that has no body."""
+
+ async def handler(request: web.Request) -> web.Response:
+ return web.Response(text="OK")
+
+ app = web.Application()
+ app.router.add_get("/", handler)
+ client = await aiohttp_client(app)
+
+ # GET request with expect100=True but no body
+ async with client.get("/", expect100=True) as resp:
+ assert resp.status == 200
+ assert await resp.text() == "OK"
+
+
+async def test_expect100_continue_with_none_payload(
+ aiohttp_client: AiohttpClient,
+) -> None:
+ """Test expect100 continue handling when payload is None from the start."""
+ expect_received = False
+
+ async def handler(request: web.Request) -> web.Response:
+ return web.Response(body=b"OK")
+
+ async def expect_handler(request: web.Request) -> None:
+ nonlocal expect_received
+ expect_received = True
+ # Send 100 Continue
+ assert request.transport is not None
+ request.transport.write(b"HTTP/1.1 100 Continue\r\n\r\n")
+
+ app = web.Application()
+ app.router.add_post("/", handler, expect_handler=expect_handler)
+ client = await aiohttp_client(app)
+
+ # POST request with expect100=True but no body (data=None)
+ async with client.post("/", expect100=True, data=None) as resp:
+ assert resp.status == 200
+ assert await resp.read() == b"OK"
+
+ # Expect handler should still be called even with no body
+ assert expect_received
+
+
@pytest.mark.usefixtures("parametrize_zlib_backend")
async def test_encoding_deflate(aiohttp_client) -> None:
async def handler(request):
@@ -4502,3 +4610,439 @@ async def handler(request: web.Request) -> web.Response:
data = await resp.read()
assert data == b""
resp.close()
+
+
+async def test_bytes_payload_redirect(aiohttp_client: AiohttpClient) -> None:
+ """Test that BytesPayload can be reused across redirects."""
+ data_received = []
+
+ async def redirect_handler(request: web.Request) -> web.Response:
+ data = await request.read()
+ data_received.append(("redirect", data))
+ # Use 307 to preserve POST method
+ raise web.HTTPTemporaryRedirect("/final_destination")
+
+ async def final_handler(request: web.Request) -> web.Response:
+ data = await request.read()
+ data_received.append(("final", data))
+ return web.Response(text=f"Received: {data.decode()}")
+
+ app = web.Application()
+ app.router.add_post("/redirect", redirect_handler)
+ app.router.add_post("/final_destination", final_handler)
+
+ client = await aiohttp_client(app)
+
+ payload_data = b"test payload data"
+ payload = BytesPayload(payload_data)
+
+ resp = await client.post("/redirect", data=payload)
+ assert resp.status == 200
+ text = await resp.text()
+ assert text == "Received: test payload data"
+ # Both endpoints should have received the data
+ assert data_received == [("redirect", payload_data), ("final", payload_data)]
+
+
+async def test_string_payload_redirect(aiohttp_client: AiohttpClient) -> None:
+ """Test that StringPayload can be reused across redirects."""
+ data_received = []
+
+ async def redirect_handler(request: web.Request) -> web.Response:
+ data = await request.text()
+ data_received.append(("redirect", data))
+ # Use 307 to preserve POST method
+ raise web.HTTPTemporaryRedirect("/final_destination")
+
+ async def final_handler(request: web.Request) -> web.Response:
+ data = await request.text()
+ data_received.append(("final", data))
+ return web.Response(text=f"Received: {data}")
+
+ app = web.Application()
+ app.router.add_post("/redirect", redirect_handler)
+ app.router.add_post("/final_destination", final_handler)
+
+ client = await aiohttp_client(app)
+
+ payload_data = "test string payload"
+ payload = StringPayload(payload_data)
+
+ resp = await client.post("/redirect", data=payload)
+ assert resp.status == 200
+ text = await resp.text()
+ assert text == "Received: test string payload"
+ # Both endpoints should have received the data
+ assert data_received == [("redirect", payload_data), ("final", payload_data)]
+
+
+async def test_async_iterable_payload_redirect(aiohttp_client: AiohttpClient) -> None:
+ """Test that AsyncIterablePayload cannot be reused across redirects."""
+ data_received = []
+
+ async def redirect_handler(request: web.Request) -> web.Response:
+ data = await request.read()
+ data_received.append(("redirect", data))
+ # Use 307 to preserve POST method
+ raise web.HTTPTemporaryRedirect("/final_destination")
+
+ async def final_handler(request: web.Request) -> web.Response:
+ data = await request.read()
+ data_received.append(("final", data))
+ return web.Response(text=f"Received: {data.decode()}")
+
+ app = web.Application()
+ app.router.add_post("/redirect", redirect_handler)
+ app.router.add_post("/final_destination", final_handler)
+
+ client = await aiohttp_client(app)
+
+ chunks = [b"chunk1", b"chunk2", b"chunk3"]
+
+ async def async_gen() -> AsyncIterator[bytes]:
+ for chunk in chunks:
+ yield chunk
+
+ payload = AsyncIterablePayload(async_gen())
+
+ resp = await client.post("/redirect", data=payload)
+ assert resp.status == 200
+ text = await resp.text()
+ # AsyncIterablePayload is consumed after first use, so the redirected request has an empty body
+ assert text == "Received: "
+
+ # Only the first endpoint should have received the payload data
+ expected_data = b"".join(chunks)
+ assert len(data_received) == 2
+ assert data_received[0] == ("redirect", expected_data)
+ assert data_received[1] == ("final", b"") # Empty after being consumed
+
+
+async def test_buffered_reader_payload_redirect(aiohttp_client: AiohttpClient) -> None:
+ """Test that BufferedReaderPayload can be reused across redirects."""
+ data_received = []
+
+ async def redirect_handler(request: web.Request) -> web.Response:
+ data = await request.read()
+ data_received.append(("redirect", data))
+ # Use 307 to preserve POST method
+ raise web.HTTPTemporaryRedirect("/final_destination")
+
+ async def final_handler(request: web.Request) -> web.Response:
+ data = await request.read()
+ data_received.append(("final", data))
+ return web.Response(text=f"Received: {data.decode()}")
+
+ app = web.Application()
+ app.router.add_post("/redirect", redirect_handler)
+ app.router.add_post("/final_destination", final_handler)
+
+ client = await aiohttp_client(app)
+
+ payload_data = b"buffered reader payload"
+ buffer = io.BufferedReader(io.BytesIO(payload_data)) # type: ignore[arg-type]
+ payload = BufferedReaderPayload(buffer)
+
+ resp = await client.post("/redirect", data=payload)
+ assert resp.status == 200
+ text = await resp.text()
+ assert text == "Received: buffered reader payload"
+ # Both endpoints should have received the data
+ assert data_received == [("redirect", payload_data), ("final", payload_data)]
+
+
+async def test_string_io_payload_redirect(aiohttp_client: AiohttpClient) -> None:
+ """Test that StringIOPayload can be reused across redirects."""
+ data_received = []
+
+ async def redirect_handler(request: web.Request) -> web.Response:
+ data = await request.text()
+ data_received.append(("redirect", data))
+ # Use 307 to preserve POST method
+ raise web.HTTPTemporaryRedirect("/final_destination")
+
+ async def final_handler(request: web.Request) -> web.Response:
+ data = await request.text()
+ data_received.append(("final", data))
+ return web.Response(text=f"Received: {data}")
+
+ app = web.Application()
+ app.router.add_post("/redirect", redirect_handler)
+ app.router.add_post("/final_destination", final_handler)
+
+ client = await aiohttp_client(app)
+
+ payload_data = "string io payload"
+ string_io = io.StringIO(payload_data)
+ payload = StringIOPayload(string_io)
+
+ resp = await client.post("/redirect", data=payload)
+ assert resp.status == 200
+ text = await resp.text()
+ assert text == "Received: string io payload"
+ # Both endpoints should have received the data
+ assert data_received == [("redirect", payload_data), ("final", payload_data)]
+
+
+async def test_bytes_io_payload_redirect(aiohttp_client: AiohttpClient) -> None:
+ """Test that BytesIOPayload can be reused across redirects."""
+ data_received = []
+
+ async def redirect_handler(request: web.Request) -> web.Response:
+ data = await request.read()
+ data_received.append(("redirect", data))
+ # Use 307 to preserve POST method
+ raise web.HTTPTemporaryRedirect("/final_destination")
+
+ async def final_handler(request: web.Request) -> web.Response:
+ data = await request.read()
+ data_received.append(("final", data))
+ return web.Response(text=f"Received: {data.decode()}")
+
+ app = web.Application()
+ app.router.add_post("/redirect", redirect_handler)
+ app.router.add_post("/final_destination", final_handler)
+
+ client = await aiohttp_client(app)
+
+ payload_data = b"bytes io payload"
+ bytes_io = io.BytesIO(payload_data)
+ payload = BytesIOPayload(bytes_io)
+
+ resp = await client.post("/redirect", data=payload)
+ assert resp.status == 200
+ text = await resp.text()
+ assert text == "Received: bytes io payload"
+ # Both endpoints should have received the data
+ assert data_received == [("redirect", payload_data), ("final", payload_data)]
+
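+# The seekable payloads above replay by remembering the stream position
+# at construction and rewinding to it before each write. Rough sketch of
+# that pattern; the names are illustrative, not the payload internals:
+#
+#     class Rewindable:
+#         def __init__(self, fobj: io.IOBase) -> None:
+#             self._fobj = fobj
+#             self._start = fobj.tell()   # offset where the body begins
+#
+#         def rewind(self) -> None:
+#             self._fobj.seek(self._start)  # replay from the recorded offset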
+
+async def test_multiple_redirects_with_bytes_payload(
+ aiohttp_client: AiohttpClient,
+) -> None:
+ """Test BytesPayload with multiple redirects."""
+ data_received = []
+
+ async def redirect1_handler(request: web.Request) -> web.Response:
+ data = await request.read()
+ data_received.append(("redirect1", data))
+ # Use 307 to preserve POST method
+ raise web.HTTPTemporaryRedirect("/redirect2")
+
+ async def redirect2_handler(request: web.Request) -> web.Response:
+ data = await request.read()
+ data_received.append(("redirect2", data))
+ # Use 307 to preserve POST method
+ raise web.HTTPTemporaryRedirect("/final_destination")
+
+ async def final_handler(request: web.Request) -> web.Response:
+ data = await request.read()
+ data_received.append(("final", data))
+ return web.Response(text=f"Received after 2 redirects: {data.decode()}")
+
+ app = web.Application()
+ app.router.add_post("/redirect", redirect1_handler)
+ app.router.add_post("/redirect2", redirect2_handler)
+ app.router.add_post("/final_destination", final_handler)
+
+ client = await aiohttp_client(app)
+
+ payload_data = b"multi-redirect-test"
+ payload = BytesPayload(payload_data)
+
+ resp = await client.post("/redirect", data=payload)
+ assert resp.status == 200
+ text = await resp.text()
+ assert text == f"Received after 2 redirects: {payload_data.decode()}"
+ # All 3 endpoints should have received the same data
+ assert data_received == [
+ ("redirect1", payload_data),
+ ("redirect2", payload_data),
+ ("final", payload_data),
+ ]
+
+
+async def test_redirect_with_empty_payload(aiohttp_client: AiohttpClient) -> None:
+ """Test redirects with empty payloads."""
+ data_received = []
+
+ async def redirect_handler(request: web.Request) -> web.Response:
+ data = await request.read()
+ data_received.append(("redirect", data))
+ # Use 307 to preserve POST method
+ raise web.HTTPTemporaryRedirect("/final_destination")
+
+ async def final_handler(request: web.Request) -> web.Response:
+ data = await request.read()
+ data_received.append(("final", data))
+ return web.Response(text="Done")
+
+ app = web.Application()
+ app.router.add_post("/redirect", redirect_handler)
+ app.router.add_post("/final_destination", final_handler)
+
+ client = await aiohttp_client(app)
+
+ # Test with empty BytesPayload
+ payload = BytesPayload(b"")
+ resp = await client.post("/redirect", data=payload)
+ assert resp.status == 200
+ assert data_received == [("redirect", b""), ("final", b"")]
+
+
+async def test_redirect_preserves_content_type(aiohttp_client: AiohttpClient) -> None:
+ """Test that content-type is preserved across redirects."""
+ content_types = []
+
+ async def redirect_handler(request: web.Request) -> web.Response:
+ content_types.append(("redirect", request.content_type))
+ # Use 307 to preserve POST method
+ raise web.HTTPTemporaryRedirect("/final_destination")
+
+ async def final_handler(request: web.Request) -> web.Response:
+ content_types.append(("final", request.content_type))
+ return web.Response(text="Done")
+
+ app = web.Application()
+ app.router.add_post("/redirect", redirect_handler)
+ app.router.add_post("/final_destination", final_handler)
+
+ client = await aiohttp_client(app)
+
+    # StringPayload sets Content-Type to text/plain (request.content_type omits the charset)
+ payload = StringPayload("test data")
+ resp = await client.post("/redirect", data=payload)
+ assert resp.status == 200
+ # Both requests should have the same content type
+ assert len(content_types) == 2
+ assert content_types[0][1] == "text/plain"
+ assert content_types[1][1] == "text/plain"
+
+
+class MockedBytesPayload(BytesPayload):
+ """A BytesPayload that tracks whether close() was called."""
+
+ def __init__(self, data: bytes) -> None:
+ super().__init__(data)
+ self.close_called = False
+
+ async def close(self) -> None:
+ self.close_called = True
+ await super().close()
+
+
+async def test_too_many_redirects_closes_payload(aiohttp_client: AiohttpClient) -> None:
+ """Test that TooManyRedirects exception closes the request payload."""
+
+ async def redirect_handler(request: web.Request) -> web.Response:
+ # Read the payload to simulate server processing
+ await request.read()
+ count = int(request.match_info.get("count", 0))
+ # Use 307 to preserve POST method
+ return web.Response(
+ status=307, headers={hdrs.LOCATION: f"/redirect/{count + 1}"}
+ )
+
+ app = web.Application()
+ app.router.add_post(r"/redirect/{count:\d+}", redirect_handler)
+
+ client = await aiohttp_client(app)
+
+ # Create a mocked payload to verify close() is called
+ payload = MockedBytesPayload(b"test payload")
+
+ with pytest.raises(TooManyRedirects):
+ await client.post("/redirect/0", data=payload, max_redirects=2)
+
+ assert (
+ payload.close_called
+ ), "Payload.close() was not called when TooManyRedirects was raised"
+
+
+async def test_invalid_url_redirect_closes_payload(
+ aiohttp_client: AiohttpClient,
+) -> None:
+ """Test that InvalidUrlRedirectClientError exception closes the request payload."""
+
+ async def redirect_handler(request: web.Request) -> web.Response:
+ # Read the payload to simulate server processing
+ await request.read()
+        # Return a URL whose port is out of range so URL parsing
+        # raises ValueError and the redirect is rejected
+ return web.Response(
+ status=307, headers={hdrs.LOCATION: "http://example.com:999999/path"}
+ )
+
+ app = web.Application()
+ app.router.add_post("/redirect", redirect_handler)
+
+ client = await aiohttp_client(app)
+
+ # Create a mocked payload to verify close() is called
+ payload = MockedBytesPayload(b"test payload")
+
+ with pytest.raises(
+ InvalidUrlRedirectClientError,
+ match="Server attempted redirecting to a location that does not look like a URL",
+ ):
+ await client.post("/redirect", data=payload)
+
+ assert (
+ payload.close_called
+ ), "Payload.close() was not called when InvalidUrlRedirectClientError was raised"
+
+
+async def test_non_http_redirect_closes_payload(aiohttp_client: AiohttpClient) -> None:
+ """Test that NonHttpUrlRedirectClientError exception closes the request payload."""
+
+ async def redirect_handler(request: web.Request) -> web.Response:
+ # Read the payload to simulate server processing
+ await request.read()
+ # Return a non-HTTP scheme URL
+ return web.Response(
+ status=307, headers={hdrs.LOCATION: "ftp://example.com/file"}
+ )
+
+ app = web.Application()
+ app.router.add_post("/redirect", redirect_handler)
+
+ client = await aiohttp_client(app)
+
+ # Create a mocked payload to verify close() is called
+ payload = MockedBytesPayload(b"test payload")
+
+ with pytest.raises(NonHttpUrlRedirectClientError):
+ await client.post("/redirect", data=payload)
+
+ assert (
+ payload.close_called
+ ), "Payload.close() was not called when NonHttpUrlRedirectClientError was raised"
+
+
+async def test_invalid_redirect_origin_closes_payload(
+ aiohttp_client: AiohttpClient,
+) -> None:
+ """Test that InvalidUrlRedirectClientError exception (invalid origin) closes the request payload."""
+
+ async def redirect_handler(request: web.Request) -> web.Response:
+ # Read the payload to simulate server processing
+ await request.read()
+        # Return a URL without a host so the origin() check fails
+ return web.Response(status=307, headers={hdrs.LOCATION: "http:///path"})
+
+ app = web.Application()
+ app.router.add_post("/redirect", redirect_handler)
+
+ client = await aiohttp_client(app)
+
+ # Create a mocked payload to verify close() is called
+ payload = MockedBytesPayload(b"test payload")
+
+ with pytest.raises(
+ InvalidUrlRedirectClientError, match="Invalid redirect URL origin"
+ ):
+ await client.post("/redirect", data=payload)
+
+ assert (
+ payload.close_called
+ ), "Payload.close() was not called when InvalidUrlRedirectClientError (invalid origin) was raised"
diff --git a/tests/test_client_middleware.py b/tests/test_client_middleware.py
index e698e8ee825..217877759c0 100644
--- a/tests/test_client_middleware.py
+++ b/tests/test_client_middleware.py
@@ -1161,3 +1161,111 @@ async def __call__(
assert received_bodies[1] == json_str2
assert received_bodies[2] == "" # GET request has no body
assert received_bodies[3] == text_data
+
+
+async def test_client_middleware_update_shorter_body(
+ aiohttp_server: AiohttpServer,
+) -> None:
+ """Test that middleware can update request body using update_body method."""
+
+ async def handler(request: web.Request) -> web.Response:
+ body = await request.text()
+ return web.Response(text=body)
+
+ app = web.Application()
+ app.router.add_post("/", handler)
+ server = await aiohttp_server(app)
+
+ async def update_body_middleware(
+ request: ClientRequest, handler: ClientHandlerType
+ ) -> ClientResponse:
+ # Update the request body
+ await request.update_body(b"short body")
+ return await handler(request)
+
+ async with ClientSession(middlewares=(update_body_middleware,)) as session:
+ async with session.post(server.make_url("/"), data=b"original body") as resp:
+ assert resp.status == 200
+ text = await resp.text()
+ assert text == "short body"
+
+
+async def test_client_middleware_update_longer_body(
+ aiohttp_server: AiohttpServer,
+) -> None:
+ """Test that middleware can update request body using update_body method."""
+
+ async def handler(request: web.Request) -> web.Response:
+ body = await request.text()
+ return web.Response(text=body)
+
+ app = web.Application()
+ app.router.add_post("/", handler)
+ server = await aiohttp_server(app)
+
+ async def update_body_middleware(
+ request: ClientRequest, handler: ClientHandlerType
+ ) -> ClientResponse:
+ # Update the request body
+ await request.update_body(b"much much longer body")
+ return await handler(request)
+
+ async with ClientSession(middlewares=(update_body_middleware,)) as session:
+ async with session.post(server.make_url("/"), data=b"original body") as resp:
+ assert resp.status == 200
+ text = await resp.text()
+ assert text == "much much longer body"
+
+
+async def test_client_middleware_update_string_body(
+ aiohttp_server: AiohttpServer,
+) -> None:
+ """Test that middleware can update request body using update_body method."""
+
+ async def handler(request: web.Request) -> web.Response:
+ body = await request.text()
+ return web.Response(text=body)
+
+ app = web.Application()
+ app.router.add_post("/", handler)
+ server = await aiohttp_server(app)
+
+ async def update_body_middleware(
+ request: ClientRequest, handler: ClientHandlerType
+ ) -> ClientResponse:
+ # Update the request body
+ await request.update_body("this is a string")
+ return await handler(request)
+
+ async with ClientSession(middlewares=(update_body_middleware,)) as session:
+ async with session.post(server.make_url("/"), data="original string") as resp:
+ assert resp.status == 200
+ text = await resp.text()
+ assert text == "this is a string"
+
+
+async def test_client_middleware_switch_types(
+ aiohttp_server: AiohttpServer,
+) -> None:
+ """Test that middleware can update request body using update_body method."""
+
+ async def handler(request: web.Request) -> web.Response:
+ body = await request.text()
+ return web.Response(text=body)
+
+ app = web.Application()
+ app.router.add_post("/", handler)
+ server = await aiohttp_server(app)
+
+ async def update_body_middleware(
+ request: ClientRequest, handler: ClientHandlerType
+ ) -> ClientResponse:
+ # Update the request body
+ await request.update_body("now a string")
+ return await handler(request)
+
+ async with ClientSession(middlewares=(update_body_middleware,)) as session:
+ async with session.post(server.make_url("/"), data=b"original bytes") as resp:
+ assert resp.status == 200
+ text = await resp.text()
+ assert text == "now a string"
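+
+# A practical middleware use of update_body is rewriting the payload and
+# letting the request recompute Content-Length, as asserted above. Sketch
+# under the assumption that the body is a Payload exposing as_bytes()
+# (exercised by the payload tests in this change); gzip is stdlib:
+#
+#     async def gzip_middleware(
+#         request: ClientRequest, handler: ClientHandlerType
+#     ) -> ClientResponse:
+#         body = request.body
+#         raw = await body.as_bytes() if isinstance(body, Payload) else b""
+#         await request.update_body(gzip.compress(raw))
+#         request.headers[hdrs.CONTENT_ENCODING] = "gzip"
+#         return await handler(request)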
diff --git a/tests/test_client_middleware_digest_auth.py b/tests/test_client_middleware_digest_auth.py
index 26118288913..6da6850bafc 100644
--- a/tests/test_client_middleware_digest_auth.py
+++ b/tests/test_client_middleware_digest_auth.py
@@ -1,5 +1,6 @@
"""Test digest authentication middleware for aiohttp client."""
+import io
from hashlib import md5, sha1
from typing import Generator, Union
from unittest import mock
@@ -18,6 +19,7 @@
unescape_quotes,
)
from aiohttp.client_reqrep import ClientResponse
+from aiohttp.payload import BytesIOPayload
from aiohttp.pytest_plugin import AiohttpServer
from aiohttp.web import Application, Request, Response
@@ -154,7 +156,7 @@ async def test_authenticate_scenarios(
),
],
)
-def test_encode_validation_errors(
+async def test_encode_validation_errors(
digest_auth_mw: DigestAuthMiddleware,
challenge: DigestAuthChallenge,
expected_error: str,
@@ -162,12 +164,14 @@ def test_encode_validation_errors(
"""Test validation errors when encoding digest auth headers."""
digest_auth_mw._challenge = challenge
with pytest.raises(ClientError, match=expected_error):
- digest_auth_mw._encode("GET", URL("http://example.com/resource"), "")
+ await digest_auth_mw._encode("GET", URL("http://example.com/resource"), b"")
-def test_encode_digest_with_md5(auth_mw_with_challenge: DigestAuthMiddleware) -> None:
- header = auth_mw_with_challenge._encode(
- "GET", URL("http://example.com/resource"), ""
+async def test_encode_digest_with_md5(
+ auth_mw_with_challenge: DigestAuthMiddleware,
+) -> None:
+ header = await auth_mw_with_challenge._encode(
+ "GET", URL("http://example.com/resource"), b""
)
assert header.startswith("Digest ")
assert 'username="user"' in header
@@ -177,7 +181,7 @@ def test_encode_digest_with_md5(auth_mw_with_challenge: DigestAuthMiddleware) ->
@pytest.mark.parametrize(
"algorithm", ["MD5-SESS", "SHA-SESS", "SHA-256-SESS", "SHA-512-SESS"]
)
-def test_encode_digest_with_sess_algorithms(
+async def test_encode_digest_with_sess_algorithms(
digest_auth_mw: DigestAuthMiddleware,
qop_challenge: DigestAuthChallenge,
algorithm: str,
@@ -188,11 +192,13 @@ def test_encode_digest_with_sess_algorithms(
challenge["algorithm"] = algorithm
digest_auth_mw._challenge = challenge
- header = digest_auth_mw._encode("GET", URL("http://example.com/resource"), "")
+ header = await digest_auth_mw._encode(
+ "GET", URL("http://example.com/resource"), b""
+ )
assert f"algorithm={algorithm}" in header
-def test_encode_unsupported_algorithm(
+async def test_encode_unsupported_algorithm(
digest_auth_mw: DigestAuthMiddleware, basic_challenge: DigestAuthChallenge
) -> None:
"""Test that unsupported algorithm raises ClientError."""
@@ -202,10 +208,10 @@ def test_encode_unsupported_algorithm(
digest_auth_mw._challenge = challenge
with pytest.raises(ClientError, match="Unsupported hash algorithm"):
- digest_auth_mw._encode("GET", URL("http://example.com/resource"), "")
+ await digest_auth_mw._encode("GET", URL("http://example.com/resource"), b"")
-def test_invalid_qop_rejected(
+async def test_invalid_qop_rejected(
digest_auth_mw: DigestAuthMiddleware, basic_challenge: DigestAuthChallenge
) -> None:
"""Test that invalid Quality of Protection values are rejected."""
@@ -217,7 +223,7 @@ def test_invalid_qop_rejected(
# This should raise an error about unsupported QoP
with pytest.raises(ClientError, match="Unsupported Quality of Protection"):
- digest_auth_mw._encode("GET", URL("http://example.com"), "")
+ await digest_auth_mw._encode("GET", URL("http://example.com"), b"")
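+
+# _encode is now async and takes bytes so the middleware can hash the
+# entity body for qop=auth-int (RFC 7616). Sketch of that formula only,
+# with H standing in for the negotiated hash, not the middleware's code:
+#
+#     def H(data: bytes) -> str:
+#         return md5(data).hexdigest()
+#
+#     ha2 = H(f"{method}:{uri}:{H(body)}".encode())  # auth-int HA2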
def compute_expected_digest(
@@ -264,14 +270,17 @@ def KD(secret: str, data: str) -> str:
@pytest.mark.parametrize(
("body", "body_str"),
[
- ("this is a body", "this is a body"), # String case
(b"this is a body", "this is a body"), # Bytes case
+ (
+ BytesIOPayload(io.BytesIO(b"this is a body")),
+ "this is a body",
+ ), # BytesIOPayload case
],
)
-def test_digest_response_exact_match(
+async def test_digest_response_exact_match(
qop: str,
algorithm: str,
- body: Union[str, bytes],
+ body: Union[bytes, BytesIOPayload],
body_str: str,
mock_sha1_digest: mock.MagicMock,
) -> None:
@@ -295,7 +304,7 @@ def test_digest_response_exact_match(
auth._last_nonce_bytes = nonce.encode("utf-8")
auth._nonce_count = nc
- header = auth._encode(method, URL(f"http://host{uri}"), body)
+ header = await auth._encode(method, URL(f"http://host{uri}"), body)
# Get expected digest
expected = compute_expected_digest(
@@ -402,7 +411,7 @@ def test_middleware_invalid_login() -> None:
DigestAuthMiddleware("user:name", "pass")
-def test_escaping_quotes_in_auth_header() -> None:
+async def test_escaping_quotes_in_auth_header() -> None:
"""Test that double quotes are properly escaped in auth header."""
auth = DigestAuthMiddleware('user"with"quotes', "pass")
auth._challenge = DigestAuthChallenge(
@@ -413,7 +422,7 @@ def test_escaping_quotes_in_auth_header() -> None:
opaque='opaque"with"quotes',
)
- header = auth._encode("GET", URL("http://example.com/path"), "")
+ header = await auth._encode("GET", URL("http://example.com/path"), b"")
# Check that quotes are escaped in the header
assert 'username="user\\"with\\"quotes"' in header
@@ -422,13 +431,15 @@ def test_escaping_quotes_in_auth_header() -> None:
assert 'opaque="opaque\\"with\\"quotes"' in header
-def test_template_based_header_construction(
+async def test_template_based_header_construction(
auth_mw_with_challenge: DigestAuthMiddleware,
mock_sha1_digest: mock.MagicMock,
mock_md5_digest: mock.MagicMock,
) -> None:
"""Test that the template-based header construction works correctly."""
- header = auth_mw_with_challenge._encode("GET", URL("http://example.com/test"), "")
+ header = await auth_mw_with_challenge._encode(
+ "GET", URL("http://example.com/test"), b""
+ )
# Split the header into scheme and parameters
scheme, params_str = header.split(" ", 1)
diff --git a/tests/test_client_request.py b/tests/test_client_request.py
index 70b30dd14f2..b1807b96d82 100644
--- a/tests/test_client_request.py
+++ b/tests/test_client_request.py
@@ -4,9 +4,10 @@
import pathlib
import sys
import urllib.parse
+import warnings
from collections.abc import Callable, Iterable
from http.cookies import BaseCookie, Morsel, SimpleCookie
-from typing import Any, Optional, Protocol, Union
+from typing import Any, AsyncIterator, Optional, Protocol, Union
from unittest import mock
import pytest
@@ -750,7 +751,7 @@ async def test_post_data(loop: asyncio.AbstractEventLoop, conn: mock.Mock) -> No
)
resp = await req.send(conn)
assert "/" == req.url.path
- assert b"life=42" == req.body._value
+ assert b"life=42" == req.body._value # type: ignore[union-attr]
assert "application/x-www-form-urlencoded" == req.headers["CONTENT-TYPE"]
await req.close()
resp.close()
@@ -787,7 +788,7 @@ async def test_get_with_data(loop) -> None:
meth, URL("http://python.org/"), data={"life": "42"}, loop=loop
)
assert "/" == req.url.path
- assert b"life=42" == req.body._value
+ assert b"life=42" == req.body._value # type: ignore[union-attr]
await req.close()
@@ -893,6 +894,7 @@ async def test_chunked_explicit(loop, conn) -> None:
req = ClientRequest("post", URL("http://python.org/"), chunked=True, loop=loop)
with mock.patch("aiohttp.client_reqrep.StreamWriter") as m_writer:
m_writer.return_value.write_headers = mock.AsyncMock()
+ m_writer.return_value.write_eof = mock.AsyncMock()
resp = await req.send(conn)
assert "chunked" == req.headers["TRANSFER-ENCODING"]
@@ -949,7 +951,65 @@ async def test_precompressed_data_stays_intact(loop) -> None:
await req.close()
-async def test_file_upload_not_chunked_seek(loop) -> None:
+async def test_body_with_size_sets_content_length(
+ loop: asyncio.AbstractEventLoop,
+) -> None:
+ """Test that when body has a size and no Content-Length header is set, it gets added."""
+ # Create a BytesPayload which has a size property
+ data = b"test data"
+
+ # Create request with data that will create a BytesPayload
+ req = ClientRequest(
+ "post",
+ URL("http://python.org/"),
+ data=data,
+ loop=loop,
+ )
+
+ # Verify Content-Length was set from body.size
+ assert req.headers["CONTENT-LENGTH"] == str(len(data))
+ assert req.body is not None
+ assert req._body is not None # When _body is set, body returns it
+ assert req._body.size == len(data)
+ await req.close()
+
+
+async def test_body_payload_with_size_no_content_length(
+ loop: asyncio.AbstractEventLoop,
+) -> None:
+ """Test that when a body payload with size is set directly, Content-Length is added."""
+ # Create a payload with a known size
+ data = b"payload data"
+ bytes_payload = payload.BytesPayload(data)
+
+ # Create request with no data initially
+ req = ClientRequest(
+ "post",
+ URL("http://python.org/"),
+ loop=loop,
+ )
+
+ # Set body directly (bypassing update_body_from_data to avoid it setting Content-Length)
+ req._body = bytes_payload
+
+ # Ensure conditions for the code path we want to test
+ assert req._body is not None
+ assert hdrs.CONTENT_LENGTH not in req.headers
+ assert req._body.size is not None
+ assert not req.chunked
+
+ # Now trigger update_transfer_encoding which should set Content-Length
+ req.update_transfer_encoding()
+
+ # Verify Content-Length was set from body.size
+ assert req.headers["CONTENT-LENGTH"] == str(len(data))
+ assert req.body is bytes_payload
+ assert req._body is bytes_payload # Access _body which is the Payload
+ assert req._body.size == len(data)
+ await req.close()
+
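+# The header rule exercised by the two tests above, as a sketch of the
+# decision (illustrative pseudocode, not the client_reqrep source):
+#
+#     if req.chunked:
+#         headers[hdrs.TRANSFER_ENCODING] = "chunked"
+#     elif req._body is not None and req._body.size is not None:
+#         headers[hdrs.CONTENT_LENGTH] = str(req._body.size)
+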
+
+async def test_file_upload_not_chunked_seek(loop: asyncio.AbstractEventLoop) -> None:
file_path = pathlib.Path(__file__).parent / "aiohttp.png"
with file_path.open("rb") as f:
f.seek(100)
@@ -1201,6 +1261,7 @@ def read(self, decode=False):
async def test_oserror_on_write_bytes(loop, conn) -> None:
req = ClientRequest("POST", URL("http://python.org/"), loop=loop)
+ req.body = b"test data"
writer = WriterMock()
writer.write.side_effect = OSError
@@ -1584,7 +1645,17 @@ async def test_write_bytes_with_iterable_content_length_limit(
"""Test that write_bytes respects content_length limit for iterable data."""
# Test with iterable data
req = ClientRequest("post", URL("http://python.org/"), loop=loop)
- req.body = data
+
+ # Convert list to async generator if needed
+ if isinstance(data, list):
+
+ async def gen() -> AsyncIterator[bytes]:
+ for chunk in data:
+ yield chunk
+
+ req.body = gen() # type: ignore[assignment] # https://github.com/python/mypy/issues/12892
+ else:
+ req.body = data
writer = StreamWriter(protocol=conn.protocol, loop=loop)
# Use content_length=7 to truncate at the middle of Part2
@@ -1599,7 +1670,13 @@ async def test_write_bytes_empty_iterable_with_content_length(
) -> None:
"""Test that write_bytes handles empty iterable body with content_length."""
req = ClientRequest("post", URL("http://python.org/"), loop=loop)
- req.body = [] # Empty iterable
+
+ # Create an empty async generator
+ async def gen() -> AsyncIterator[bytes]:
+ return
+ yield # pragma: no cover # This makes it a generator but never executes
+
+ req.body = gen() # type: ignore[assignment] # https://github.com/python/mypy/issues/12892
writer = StreamWriter(protocol=conn.protocol, loop=loop)
# Use content_length=10 with empty body
@@ -1608,3 +1685,392 @@ async def test_write_bytes_empty_iterable_with_content_length(
# Verify nothing was written
assert len(buf) == 0
await req.close()
+
+
+async def test_warn_if_unclosed_payload_via_body_setter(
+ make_request: _RequestMaker,
+) -> None:
+ """Test that _warn_if_unclosed_payload is called when setting body with unclosed payload."""
+ req = make_request("POST", "http://python.org/")
+
+ # First set a payload that needs manual closing (autoclose=False)
+ file_payload = payload.BufferedReaderPayload(
+ io.BufferedReader(io.BytesIO(b"test data")), # type: ignore[arg-type]
+ encoding="utf-8",
+ )
+ req.body = file_payload
+
+ # Setting body again should trigger the warning for the previous payload
+ with pytest.warns(
+ ResourceWarning,
+ match="The previous request body contains unclosed resources",
+ ):
+ req.body = b"new data"
+
+ await req.close()
+
+
+async def test_no_warn_for_autoclose_payload_via_body_setter(
+ make_request: _RequestMaker,
+) -> None:
+ """Test that no warning is issued for payloads with autoclose=True."""
+ req = make_request("POST", "http://python.org/")
+
+ # First set BytesIOPayload which has autoclose=True
+ bytes_payload = payload.BytesIOPayload(io.BytesIO(b"test data"))
+ req.body = bytes_payload
+
+ # Setting body again should not trigger warning since previous payload has autoclose=True
+ with warnings.catch_warnings(record=True) as warning_list:
+ warnings.simplefilter("always")
+ req.body = b"new data"
+
+ # Filter out any non-ResourceWarning warnings
+ resource_warnings = [
+ w for w in warning_list if issubclass(w.category, ResourceWarning)
+ ]
+ assert len(resource_warnings) == 0
+
+ await req.close()
+
+
+async def test_no_warn_for_consumed_payload_via_body_setter(
+ make_request: _RequestMaker,
+) -> None:
+ """Test that no warning is issued for already consumed payloads."""
+ req = make_request("POST", "http://python.org/")
+
+ # Create a payload that needs manual closing
+ file_payload = payload.BufferedReaderPayload(
+ io.BufferedReader(io.BytesIO(b"test data")), # type: ignore[arg-type]
+ encoding="utf-8",
+ )
+ req.body = file_payload
+
+ # Properly close the payload to mark it as consumed
+ await file_payload.close()
+
+ # Setting body again should not trigger warning since previous payload is consumed
+ with warnings.catch_warnings(record=True) as warning_list:
+ warnings.simplefilter("always")
+ req.body = b"new data"
+
+ # Filter out any non-ResourceWarning warnings
+ resource_warnings = [
+ w for w in warning_list if issubclass(w.category, ResourceWarning)
+ ]
+ assert len(resource_warnings) == 0
+
+ await req.close()
+
+
+async def test_warn_if_unclosed_payload_via_update_body_from_data(
+ make_request: _RequestMaker,
+) -> None:
+ """Test that _warn_if_unclosed_payload is called via update_body_from_data."""
+ req = make_request("POST", "http://python.org/")
+
+ # First set a payload that needs manual closing
+ file_payload = payload.BufferedReaderPayload(
+ io.BufferedReader(io.BytesIO(b"initial data")), # type: ignore[arg-type]
+ encoding="utf-8",
+ )
+ req.update_body_from_data(file_payload)
+
+ # Create FormData for second update
+ form = aiohttp.FormData()
+ form.add_field("test", "value")
+
+ # update_body_from_data should trigger the warning for the previous payload
+ with pytest.warns(
+ ResourceWarning,
+ match="The previous request body contains unclosed resources",
+ ):
+ req.update_body_from_data(form)
+
+ await req.close()
+
+
+async def test_warn_via_update_with_file_payload(
+ make_request: _RequestMaker,
+) -> None:
+ """Test warning via update_body_from_data with file-like object."""
+ req = make_request("POST", "http://python.org/")
+
+ # First create a file-like object that results in BufferedReaderPayload
+ buffered1 = io.BufferedReader(io.BytesIO(b"file content 1")) # type: ignore[arg-type]
+ req.update_body_from_data(buffered1)
+
+ # Second update should warn about the first payload
+ buffered2 = io.BufferedReader(io.BytesIO(b"file content 2")) # type: ignore[arg-type]
+
+ with pytest.warns(
+ ResourceWarning,
+ match="The previous request body contains unclosed resources",
+ ):
+ req.update_body_from_data(buffered2)
+
+ await req.close()
+
+
+async def test_no_warn_for_simple_data_via_update_body_from_data(
+ make_request: _RequestMaker,
+) -> None:
+ """Test that no warning is issued for simple data types."""
+ req = make_request("POST", "http://python.org/")
+
+ # Simple bytes data should not trigger warning
+ with warnings.catch_warnings(record=True) as warning_list:
+ warnings.simplefilter("always")
+ req.update_body_from_data(b"simple data")
+
+ # Filter out any non-ResourceWarning warnings
+ resource_warnings = [
+ w for w in warning_list if issubclass(w.category, ResourceWarning)
+ ]
+ assert len(resource_warnings) == 0
+
+ await req.close()
+
+
+async def test_update_body_closes_previous_payload(
+ make_request: _RequestMaker,
+) -> None:
+ """Test that update_body properly closes the previous payload."""
+ req = make_request("POST", "http://python.org/")
+
+ # Create a mock payload that tracks if it was closed
+ mock_payload = mock.Mock(spec=payload.Payload)
+ mock_payload.close = mock.AsyncMock()
+
+ # Set initial payload
+ req._body = mock_payload
+
+ # Update body with new data
+ await req.update_body(b"new body data")
+
+ # Verify the previous payload was closed
+ mock_payload.close.assert_called_once()
+
+ # Verify new body is set (it's a BytesPayload now)
+ assert isinstance(req.body, payload.BytesPayload)
+
+ await req.close()
+
+
+async def test_body_setter_closes_previous_payload(
+ make_request: _RequestMaker,
+) -> None:
+ """Test that body setter properly closes the previous payload."""
+ req = make_request("POST", "http://python.org/")
+
+ # Create a mock payload that tracks if it was closed
+ # We need to use create_autospec to ensure all methods are available
+ mock_payload = mock.create_autospec(payload.Payload, instance=True)
+
+ # Set initial payload
+ req._body = mock_payload
+
+ # Update body with new data using setter
+ req.body = b"new body data"
+
+ # Verify the previous payload was closed using _close
+ mock_payload._close.assert_called_once()
+
+ # Verify new body is set (it's a BytesPayload now)
+ assert isinstance(req.body, payload.BytesPayload)
+
+ await req.close()
+
+
+async def test_update_body_with_different_types(
+ make_request: _RequestMaker,
+) -> None:
+ """Test update_body with various data types."""
+ req = make_request("POST", "http://python.org/")
+
+ # Test with bytes
+ await req.update_body(b"bytes data")
+ assert isinstance(req.body, payload.BytesPayload)
+
+ # Test with string
+ await req.update_body("string data")
+ assert isinstance(req.body, payload.BytesPayload)
+
+ # Test with None (clears body)
+ await req.update_body(None)
+ assert req.body == b"" # type: ignore[comparison-overlap] # empty body is represented as b""
+
+ await req.close()
+
+
+async def test_update_body_with_chunked_encoding(
+ make_request: _RequestMaker,
+) -> None:
+ """Test that update_body properly handles chunked transfer encoding."""
+ # Create request with chunked=True
+ req = make_request("POST", "http://python.org/", chunked=True)
+
+ # Verify Transfer-Encoding header is set
+ assert req.headers["Transfer-Encoding"] == "chunked"
+ assert "Content-Length" not in req.headers
+
+ # Update body - should maintain chunked encoding
+ await req.update_body(b"chunked data")
+ assert req.headers["Transfer-Encoding"] == "chunked"
+ assert "Content-Length" not in req.headers
+ assert isinstance(req.body, payload.BytesPayload)
+
+ # Update with different body - chunked should remain
+ await req.update_body(b"different chunked data")
+ assert req.headers["Transfer-Encoding"] == "chunked"
+ assert "Content-Length" not in req.headers
+
+ # Clear body - chunked header should remain
+ await req.update_body(None)
+ assert req.headers["Transfer-Encoding"] == "chunked"
+ assert "Content-Length" not in req.headers
+
+ await req.close()
+
+
+async def test_update_body_get_method_with_none_body(
+ make_request: _RequestMaker,
+) -> None:
+ """Test that update_body with GET method and None body doesn't call update_transfer_encoding."""
+ # Create GET request
+ req = make_request("GET", "http://python.org/")
+
+ # GET requests shouldn't have Transfer-Encoding or Content-Length initially
+ assert "Transfer-Encoding" not in req.headers
+ assert "Content-Length" not in req.headers
+
+ # Update body to None - should not trigger update_transfer_encoding
+ # This covers the branch where body is None AND method is in GET_METHODS
+ await req.update_body(None)
+
+ # Headers should remain unchanged
+ assert "Transfer-Encoding" not in req.headers
+ assert "Content-Length" not in req.headers
+
+ await req.close()
+
+
+async def test_update_body_updates_content_length(
+ make_request: _RequestMaker,
+) -> None:
+ """Test that update_body properly updates Content-Length header when body size changes."""
+ req = make_request("POST", "http://python.org/")
+
+ # Set initial body with known size
+ await req.update_body(b"initial data")
+ initial_content_length = req.headers.get("Content-Length")
+ assert initial_content_length == "12" # len(b"initial data") = 12
+
+ # Update body with different size
+ await req.update_body(b"much longer data than before")
+ new_content_length = req.headers.get("Content-Length")
+ assert new_content_length == "28" # len(b"much longer data than before") = 28
+
+ # Update body with shorter data
+ await req.update_body(b"short")
+ assert req.headers.get("Content-Length") == "5" # len(b"short") = 5
+
+ # Clear body
+ await req.update_body(None)
+ # For None body, Content-Length should not be set
+ assert "Content-Length" not in req.headers
+
+ await req.close()
+
+
+async def test_warn_stacklevel_points_to_user_code(
+ make_request: _RequestMaker,
+) -> None:
+ """Test that the warning stacklevel correctly points to user code."""
+ req = make_request("POST", "http://python.org/")
+
+ # First set a payload that needs manual closing (autoclose=False)
+ file_payload = payload.BufferedReaderPayload(
+ io.BufferedReader(io.BytesIO(b"test data")), # type: ignore[arg-type]
+ encoding="utf-8",
+ )
+ req.body = file_payload
+
+ # Capture warnings with their details
+ with warnings.catch_warnings(record=True) as warning_list:
+ warnings.simplefilter("always", ResourceWarning)
+ # This line should be reported as the warning source
+ req.body = b"new data" # LINE TO BE REPORTED
+
+ # Find the ResourceWarning
+ resource_warnings = [
+ w for w in warning_list if issubclass(w.category, ResourceWarning)
+ ]
+ assert len(resource_warnings) == 1
+
+ warning = resource_warnings[0]
+ # The warning should point to the line where we set req.body, not inside the library
+ # Call chain: user code -> body setter -> _warn_if_unclosed_payload
+ # stacklevel=3 is used in body setter to skip the setter and _warn_if_unclosed_payload
+ assert warning.filename == __file__
+ # The line number should be the line with "req.body = b'new data'"
+ # We can't hardcode the line number, but we can verify it's not pointing
+ # to client_reqrep.py (the library code)
+ assert "client_reqrep.py" not in warning.filename
+
+ await req.close()
+
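+# Frame accounting behind the stacklevel assertion (illustrative):
+#
+#     warnings.warn(msg, ResourceWarning, stacklevel=3)
+#     # stacklevel=1 -> _warn_if_unclosed_payload itself
+#     # stacklevel=2 -> the body setter that called it
+#     # stacklevel=3 -> the user code doing `req.body = ...`
+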
+
+async def test_warn_stacklevel_update_body_from_data(
+ make_request: _RequestMaker,
+) -> None:
+ """Test that warning stacklevel is correct when called from update_body_from_data."""
+ req = make_request("POST", "http://python.org/")
+
+ # First set a payload that needs manual closing (autoclose=False)
+ file_payload = payload.BufferedReaderPayload(
+ io.BufferedReader(io.BytesIO(b"test data")), # type: ignore[arg-type]
+ encoding="utf-8",
+ )
+ req.update_body_from_data(file_payload)
+
+ # Capture warnings with their details
+ with warnings.catch_warnings(record=True) as warning_list:
+ warnings.simplefilter("always", ResourceWarning)
+ # This line should be reported as the warning source
+ req.update_body_from_data(b"new data") # LINE TO BE REPORTED
+
+ # Find the ResourceWarning
+ resource_warnings = [
+ w for w in warning_list if issubclass(w.category, ResourceWarning)
+ ]
+ assert len(resource_warnings) == 1
+
+ warning = resource_warnings[0]
+ # For update_body_from_data, stacklevel=3 points to this test file
+ # Call chain: user code -> update_body_from_data -> _warn_if_unclosed_payload
+ assert warning.filename == __file__
+ assert "client_reqrep.py" not in warning.filename
+
+ await req.close()
+
+
+async def test_expect100_with_body_becomes_none() -> None:
+ """Test that write_bytes handles body becoming None after expect100 handling."""
+ # Create a mock writer and connection
+ mock_writer = mock.AsyncMock()
+ mock_conn = mock.Mock()
+
+ # Create a request
+ req = ClientRequest(
+ "POST", URL("http://test.example.com/"), loop=asyncio.get_event_loop()
+ )
+ req._body = mock.Mock() # Start with a body
+
+ # Now set body to None to simulate a race condition
+ # where req._body is set to None after expect100 handling
+ req._body = None
+
+ await req.write_bytes(mock_writer, mock_conn, None)
+ await req.close()
diff --git a/tests/test_client_session.py b/tests/test_client_session.py
index 0656a9ed023..e31144abd0b 100644
--- a/tests/test_client_session.py
+++ b/tests/test_client_session.py
@@ -422,7 +422,9 @@ async def test_reraise_os_error(create_session) -> None:
err = OSError(1, "permission error")
req = mock.Mock()
req_factory = mock.Mock(return_value=req)
- req.send = mock.Mock(side_effect=err)
+ req.send = mock.AsyncMock(side_effect=err)
+ req._body = mock.Mock()
+ req._body.close = mock.AsyncMock()
session = await create_session(request_class=req_factory)
async def create_connection(req, traces, timeout):
@@ -446,7 +448,9 @@ class UnexpectedException(BaseException):
err = UnexpectedException("permission error")
req = mock.Mock()
req_factory = mock.Mock(return_value=req)
- req.send = mock.Mock(side_effect=err)
+ req.send = mock.AsyncMock(side_effect=err)
+ req._body = mock.Mock()
+ req._body.close = mock.AsyncMock()
session = await create_session(request_class=req_factory)
connections = []
@@ -496,6 +500,7 @@ async def test_ws_connect_allowed_protocols(
resp.start = mock.AsyncMock()
req = mock.create_autospec(aiohttp.ClientRequest, spec_set=True)
+ req._body = None # No body for WebSocket upgrade requests
req_factory = mock.Mock(return_value=req)
req.send = mock.AsyncMock(return_value=resp)
# BaseConnector allows all high level protocols by default
@@ -553,6 +558,7 @@ async def test_ws_connect_unix_socket_allowed_protocols(
resp.start = mock.AsyncMock()
req = mock.create_autospec(aiohttp.ClientRequest, spec_set=True)
+ req._body = None # No body for WebSocket upgrade requests
req_factory = mock.Mock(return_value=req)
req.send = mock.AsyncMock(return_value=resp)
# UnixConnector allows all high level protocols by default and unix sockets
diff --git a/tests/test_formdata.py b/tests/test_formdata.py
index 4b7c94ac4cd..5fe8f92b097 100644
--- a/tests/test_formdata.py
+++ b/tests/test_formdata.py
@@ -4,6 +4,8 @@
import pytest
from aiohttp import FormData, web
+from aiohttp.http_writer import StreamWriter
+from aiohttp.pytest_plugin import AiohttpClient
@pytest.fixture
@@ -105,8 +107,8 @@ async def test_formdata_field_name_is_not_quoted(buf, writer) -> None:
assert b'name="email 1"' in buf
-async def test_mark_formdata_as_processed(aiohttp_client) -> None:
- async def handler(request):
+async def test_formdata_is_reusable(aiohttp_client: AiohttpClient) -> None:
+ async def handler(request: web.Request) -> web.Response:
return web.Response()
app = web.Application()
@@ -117,10 +119,170 @@ async def handler(request):
data = FormData()
data.add_field("test", "test_value", content_type="application/json")
- resp = await client.post("/", data=data)
- assert len(data._writer._parts) == 1
+ # First request
+ resp1 = await client.post("/", data=data)
+ assert resp1.status == 200
+ resp1.release()
- with pytest.raises(RuntimeError):
- await client.post("/", data=data)
+ # Second request - should work without RuntimeError
+ resp2 = await client.post("/", data=data)
+ assert resp2.status == 200
+ resp2.release()
- resp.release()
+ # Third request to ensure continued reusability
+ resp3 = await client.post("/", data=data)
+ assert resp3.status == 200
+ resp3.release()
+
+
+async def test_formdata_reusability_multipart(
+ writer: StreamWriter, buf: bytearray
+) -> None:
+ form = FormData()
+ form.add_field("name", "value")
+ form.add_field("file", b"content", filename="test.txt", content_type="text/plain")
+
+ # First call - should generate multipart payload
+ payload1 = form()
+ assert form.is_multipart
+ buf.clear()
+ await payload1.write(writer)
+ result1 = bytes(buf)
+
+ # Verify first result contains expected content
+ assert b"name" in result1
+ assert b"value" in result1
+ assert b"test.txt" in result1
+ assert b"content" in result1
+ assert b"text/plain" in result1
+
+ # Second call - should generate identical multipart payload
+ payload2 = form()
+ buf.clear()
+ await payload2.write(writer)
+ result2 = bytes(buf)
+
+ # Results should be identical (same boundary and content)
+ assert result1 == result2
+
+ # Third call to ensure continued reusability
+ payload3 = form()
+ buf.clear()
+ await payload3.write(writer)
+ result3 = bytes(buf)
+
+ assert result1 == result3
+
+
+async def test_formdata_reusability_urlencoded(
+ writer: StreamWriter, buf: bytearray
+) -> None:
+ form = FormData()
+ form.add_field("key1", "value1")
+ form.add_field("key2", "value2")
+
+ # First call - should generate urlencoded payload
+ payload1 = form()
+ assert not form.is_multipart
+ buf.clear()
+ await payload1.write(writer)
+ result1 = bytes(buf)
+
+ # Verify first result contains expected content
+ assert b"key1=value1" in result1
+ assert b"key2=value2" in result1
+
+ # Second call - should generate identical urlencoded payload
+ payload2 = form()
+ buf.clear()
+ await payload2.write(writer)
+ result2 = bytes(buf)
+
+ # Results should be identical
+ assert result1 == result2
+
+ # Third call to ensure continued reusability
+ payload3 = form()
+ buf.clear()
+ await payload3.write(writer)
+ result3 = bytes(buf)
+
+ assert result1 == result3
+
+
+async def test_formdata_reusability_after_adding_fields(
+ writer: StreamWriter, buf: bytearray
+) -> None:
+ form = FormData()
+ form.add_field("field1", "value1")
+
+ # First call
+ payload1 = form()
+ buf.clear()
+ await payload1.write(writer)
+ result1 = bytes(buf)
+
+ # Add more fields after first call
+ form.add_field("field2", "value2")
+
+ # Second call should include new field
+ payload2 = form()
+ buf.clear()
+ await payload2.write(writer)
+ result2 = bytes(buf)
+
+ # Results should be different
+ assert result1 != result2
+ assert b"field1=value1" in result2
+ assert b"field2=value2" in result2
+ assert b"field2=value2" not in result1
+
+ # Third call should be same as second
+ payload3 = form()
+ buf.clear()
+ await payload3.write(writer)
+ result3 = bytes(buf)
+
+ assert result2 == result3
+
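+# FormData.__call__ rebuilds the payload from the retained field list on
+# every call, which is why fields added later show up in later payloads.
+# Conceptual sketch (method names follow aiohttp's FormData; details may
+# differ):
+#
+#     def __call__(self) -> Payload:
+#         if self._is_multipart:
+#             return self._gen_form_data()       # same boundary, parts rebuilt
+#         return self._gen_form_urlencoded()     # urlencode the field list
+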
+
+async def test_formdata_reusability_with_io_fields(
+ writer: StreamWriter, buf: bytearray
+) -> None:
+ form = FormData()
+
+ # Create BytesIO and StringIO objects
+ bytes_io = io.BytesIO(b"bytes content")
+ string_io = io.StringIO("string content")
+
+ form.add_field(
+ "bytes_field",
+ bytes_io,
+ filename="bytes.bin",
+ content_type="application/octet-stream",
+ )
+ form.add_field(
+ "string_field", string_io, filename="text.txt", content_type="text/plain"
+ )
+
+ # First call
+ payload1 = form()
+ buf.clear()
+ await payload1.write(writer)
+ result1 = bytes(buf)
+
+ assert b"bytes content" in result1
+ assert b"string content" in result1
+
+ # Reset IO objects for reuse
+ bytes_io.seek(0)
+ string_io.seek(0)
+
+ # Second call - should work with reset IO objects
+ payload2 = form()
+ buf.clear()
+ await payload2.write(writer)
+ result2 = bytes(buf)
+
+ # Should produce identical results
+ assert result1 == result2
diff --git a/tests/test_multipart.py b/tests/test_multipart.py
index c76d523ca86..75b73a78070 100644
--- a/tests/test_multipart.py
+++ b/tests/test_multipart.py
@@ -6,6 +6,7 @@
from unittest import mock
import pytest
+from multidict import CIMultiDict, CIMultiDictProxy
import aiohttp
from aiohttp import payload
@@ -17,7 +18,11 @@
CONTENT_TYPE,
)
from aiohttp.helpers import parse_mimetype
-from aiohttp.multipart import MultipartResponseWrapper
+from aiohttp.multipart import (
+ BodyPartReader,
+ BodyPartReaderPayload,
+ MultipartResponseWrapper,
+)
from aiohttp.streams import StreamReader
BOUNDARY = b"--:"
@@ -43,7 +48,23 @@ async def write(chunk):
@pytest.fixture
-def writer():
+def buf2() -> bytearray:
+ return bytearray()
+
+
+@pytest.fixture
+def stream2(buf2: bytearray) -> mock.Mock:
+ writer = mock.Mock()
+
+ async def write(chunk: bytes) -> None:
+ buf2.extend(chunk)
+
+ writer.write.side_effect = write
+ return writer
+
+
+@pytest.fixture
+def writer() -> aiohttp.MultipartWriter:
return aiohttp.MultipartWriter(boundary=":")
@@ -1550,3 +1571,179 @@ async def test_async_for_bodypart() -> None:
part = aiohttp.BodyPartReader(boundary=b"--:", headers={}, content=stream)
async for data in part:
assert data == b"foobarbaz"
+
+
+async def test_multipart_writer_reusability(
+ buf: bytearray,
+ stream: mock.Mock,
+ buf2: bytearray,
+ stream2: mock.Mock,
+ writer: aiohttp.MultipartWriter,
+) -> None:
+ """Test that MultipartWriter can be written multiple times."""
+ # Add some parts
+ writer.append("text content")
+ writer.append(b"binary content", {"Content-Type": "application/octet-stream"})
+ writer.append_json({"key": "value"})
+
+ # Test as_bytes multiple times
+ bytes1 = await writer.as_bytes()
+ bytes2 = await writer.as_bytes()
+ bytes3 = await writer.as_bytes()
+
+ # All as_bytes calls should return identical data
+ assert bytes1 == bytes2 == bytes3
+
+ # Verify content is there
+ assert b"text content" in bytes1
+ assert b"binary content" in bytes1
+ assert b'"key": "value"' in bytes1
+
+ # First write
+ buf.clear()
+ await writer.write(stream)
+ result1 = bytes(buf)
+
+ # Second write - should produce identical output
+ buf2.clear()
+ await writer.write(stream2)
+ result2 = bytes(buf2)
+
+ # Results should be identical
+ assert result1 == result2
+
+ # Third write to ensure continued reusability
+ buf.clear()
+ await writer.write(stream)
+ result3 = bytes(buf)
+
+ assert result1 == result3
+
+ # as_bytes should still work after writes
+ bytes4 = await writer.as_bytes()
+ assert bytes1 == bytes4
+
+
+async def test_multipart_writer_reusability_with_io_payloads(
+ buf: bytearray,
+ stream: mock.Mock,
+ buf2: bytearray,
+ stream2: mock.Mock,
+ writer: aiohttp.MultipartWriter,
+) -> None:
+ """Test that MultipartWriter with IO payloads can be reused."""
+ # Create IO objects
+ bytes_io = io.BytesIO(b"bytes io content")
+ string_io = io.StringIO("string io content")
+
+ # Add IO payloads
+ writer.append(bytes_io, {"Content-Type": "application/octet-stream"})
+ writer.append(string_io, {"Content-Type": "text/plain"})
+
+ # Test as_bytes multiple times
+ bytes1 = await writer.as_bytes()
+ bytes2 = await writer.as_bytes()
+
+ # All as_bytes calls should return identical data
+ assert bytes1 == bytes2
+ assert b"bytes io content" in bytes1
+ assert b"string io content" in bytes1
+
+ # First write
+ buf.clear()
+ await writer.write(stream)
+ result1 = bytes(buf)
+
+ assert b"bytes io content" in result1
+ assert b"string io content" in result1
+
+ # Reset IO objects for reuse
+ bytes_io.seek(0)
+ string_io.seek(0)
+
+ # Second write
+ buf2.clear()
+ await writer.write(stream2)
+ result2 = bytes(buf2)
+
+ # Should produce identical results
+ assert result1 == result2
+
+ # Test as_bytes after writes (IO objects should auto-reset)
+ bytes3 = await writer.as_bytes()
+ assert bytes1 == bytes3
+
+
+async def test_body_part_reader_payload_as_bytes() -> None:
+ """Test that BodyPartReaderPayload.as_bytes raises TypeError."""
+ # Create a mock BodyPartReader
+ headers = CIMultiDictProxy(CIMultiDict({CONTENT_TYPE: "text/plain"}))
+ protocol = mock.Mock(_reading_paused=False)
+ stream = StreamReader(protocol, 2**16, loop=asyncio.get_event_loop())
+ body_part = BodyPartReader(BOUNDARY, headers, stream)
+
+ # Create the payload
+ payload = BodyPartReaderPayload(body_part)
+
+ # Test that as_bytes raises TypeError
+ with pytest.raises(TypeError, match="Unable to read body part as bytes"):
+ await payload.as_bytes()
+
+ # Test that decode also raises TypeError
+ with pytest.raises(TypeError, match="Unable to decode"):
+ payload.decode()
+
+
+async def test_multipart_writer_close_with_exceptions() -> None:
+ """Test that MultipartWriter.close() continues closing all parts even if one raises."""
+ writer = aiohttp.MultipartWriter()
+
+ # Create mock payloads
+ # First part will raise during close
+ part1 = mock.Mock()
+ part1.autoclose = False
+ part1.consumed = False
+ part1.close = mock.AsyncMock(side_effect=RuntimeError("Part 1 close failed"))
+
+ # Second part should still get closed
+ part2 = mock.Mock()
+ part2.autoclose = False
+ part2.consumed = False
+ part2.close = mock.AsyncMock()
+
+ # Third part with autoclose=True should not be closed
+ part3 = mock.Mock()
+ part3.autoclose = True
+ part3.consumed = False
+ part3.close = mock.AsyncMock()
+
+ # Fourth part already consumed should not be closed
+ part4 = mock.Mock()
+ part4.autoclose = False
+ part4.consumed = True
+ part4.close = mock.AsyncMock()
+
+ # Add parts to writer's internal list
+ writer._parts = [
+ (part1, "", ""),
+ (part2, "", ""),
+ (part3, "", ""),
+ (part4, "", ""),
+ ]
+
+ # Close the writer - should not raise despite part1 failing
+ await writer.close()
+
+ # Verify close was called on appropriate parts
+ part1.close.assert_called_once()
+ part2.close.assert_called_once() # Should still be called despite part1 failing
+ part3.close.assert_not_called() # autoclose=True
+ part4.close.assert_not_called() # consumed=True
+
+ # Verify writer is marked as consumed
+ assert writer._consumed is True
+
+ # Calling close again should do nothing
+ await writer.close()
+ assert part1.close.call_count == 1
+ assert part2.close.call_count == 1
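+
+# The close() behavior verified above, sketched (illustrative, not the
+# multipart source): skip autoclosing or already-consumed parts and keep
+# going even if one part's close() raises.
+#
+#     for part, _encoding, _te_encoding in self._parts:
+#         if part.autoclose or part.consumed:
+#             continue
+#         try:
+#             await part.close()
+#         except Exception:
+#             continue  # still close the remaining parts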
diff --git a/tests/test_payload.py b/tests/test_payload.py
index af0230776e5..b810a68f8b7 100644
--- a/tests/test_payload.py
+++ b/tests/test_payload.py
@@ -1,9 +1,12 @@
import array
+import asyncio
import io
+import json
import unittest.mock
from collections.abc import AsyncIterator
from io import StringIO
-from typing import Optional, Union
+from pathlib import Path
+from typing import Optional, TextIO, Union
import pytest
from multidict import CIMultiDict
@@ -12,6 +15,35 @@
from aiohttp.abc import AbstractStreamWriter
+class BufferWriter(AbstractStreamWriter):
+ """Test writer that captures written bytes in a buffer."""
+
+ def __init__(self) -> None:
+ self.buffer = bytearray()
+
+ async def write(
+ self, chunk: Union[bytes, bytearray, "memoryview[int]", "memoryview[bytes]"]
+ ) -> None:
+ self.buffer.extend(bytes(chunk))
+
+ async def write_eof(self, chunk: bytes = b"") -> None:
+ """No-op for test writer."""
+
+ async def drain(self) -> None:
+ """No-op for test writer."""
+
+ def enable_compression(
+ self, encoding: str = "deflate", strategy: Optional[int] = None
+ ) -> None:
+ """Compression not implemented for test writer."""
+
+ def enable_chunking(self) -> None:
+ """Chunking not implemented for test writer."""
+
+ async def write_headers(self, status_line: str, headers: CIMultiDict[str]) -> None:
+ """Headers not captured for payload tests."""
+
+
@pytest.fixture(autouse=True)
def cleanup(
cleanup_payload_pending_file_closes: None,
@@ -417,6 +449,43 @@ async def test_textio_payload_with_encoding() -> None:
assert writer.get_written_bytes() == b"hello wo"
+async def test_textio_payload_as_bytes() -> None:
+ """Test TextIOPayload.as_bytes method with different encodings."""
+ # Test with UTF-8 encoding
+ data = io.StringIO("Hello 世界")
+ p = payload.TextIOPayload(data, encoding="utf-8")
+
+ # Test as_bytes() method
+ result = await p.as_bytes()
+ assert result == "Hello 世界".encode()
+
+ # Test that position is restored for multiple reads
+ result2 = await p.as_bytes()
+ assert result2 == "Hello 世界".encode()
+
+ # Test with different encoding parameter (should use instance encoding)
+ result3 = await p.as_bytes(encoding="latin-1")
+ assert result3 == "Hello 世界".encode() # Should still use utf-8
+
+ # Test with different encoding in payload
+ data2 = io.StringIO("Hello World")
+ p2 = payload.TextIOPayload(data2, encoding="latin-1")
+ result4 = await p2.as_bytes()
+ assert result4 == b"Hello World" # latin-1 encoding
+
+ # Test with no explicit encoding (defaults to utf-8)
+ data3 = io.StringIO("Test データ")
+ p3 = payload.TextIOPayload(data3)
+ result5 = await p3.as_bytes()
+ assert result5 == "Test データ".encode()
+
+ # Test with encoding errors parameter
+ data4 = io.StringIO("Test")
+ p4 = payload.TextIOPayload(data4, encoding="ascii")
+ result6 = await p4.as_bytes(errors="strict")
+ assert result6 == b"Test"
+
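+# The assertions above pin down that TextIOPayload.as_bytes() encodes
+# with the payload's own encoding (the per-call encoding argument is not
+# applied). Rough sketch of that contract:
+#
+#     text = self._value.read()                      # position restored afterwards
+#     return text.encode(self._encoding or "utf-8")  # instance encoding wins
+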
+
async def test_bytesio_payload_backwards_compatibility() -> None:
"""Test BytesIOPayload.write() backwards compatibility delegates to write_with_length()."""
data = io.BytesIO(b"test data")
@@ -455,3 +524,607 @@ async def gen() -> AsyncIterator[bytes]:
# Should return early without writing anything
await p.write_with_length(writer, 10)
assert writer.get_written_bytes() == b""
+
+
+async def test_async_iterable_payload_caching() -> None:
+ """Test AsyncIterablePayload caching behavior."""
+
+ async def gen() -> AsyncIterator[bytes]:
+ yield b"Hello"
+ yield b" "
+ yield b"World"
+
+ p = payload.AsyncIterablePayload(gen())
+
+ # First call to as_bytes should consume iterator and cache
+ result1 = await p.as_bytes()
+ assert result1 == b"Hello World"
+ assert p._iter is None # Iterator exhausted
+ assert p._cached_chunks == [b"Hello", b" ", b"World"] # Chunks cached
+ assert p._consumed is False # Not marked as consumed to allow reuse
+
+ # Second call should use cache
+ result2 = await p.as_bytes()
+ assert result2 == b"Hello World"
+ assert p._cached_chunks == [b"Hello", b" ", b"World"] # Still cached
+
+ # decode should work with cached chunks
+ decoded = p.decode()
+ assert decoded == "Hello World"
+
+ # write_with_length should use cached chunks
+ writer = MockStreamWriter()
+ await p.write_with_length(writer, None)
+ assert writer.get_written_bytes() == b"Hello World"
+
+ # write_with_length with limit should respect it
+ writer2 = MockStreamWriter()
+ await p.write_with_length(writer2, 5)
+ assert writer2.get_written_bytes() == b"Hello"
+
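+# The cache-on-first-read behavior asserted above, sketched (not the
+# actual implementation):
+#
+#     async def as_bytes(self) -> bytes:
+#         if self._cached_chunks is None and self._iter is not None:
+#             self._cached_chunks = [c async for c in self._iter]
+#             self._iter = None            # exhausted; chunks retained
+#         return b"".join(self._cached_chunks or [])
+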
+
+async def test_async_iterable_payload_decode_without_cache() -> None:
+ """Test AsyncIterablePayload decode raises error without cache."""
+
+ async def gen() -> AsyncIterator[bytes]:
+ yield b"test"
+
+ p = payload.AsyncIterablePayload(gen())
+
+ # decode should raise without cache
+ with pytest.raises(TypeError) as excinfo:
+ p.decode()
+ assert "Unable to decode - content not cached" in str(excinfo.value)
+
+ # After as_bytes, decode should work
+ await p.as_bytes()
+ assert p.decode() == "test"
+
+
+async def test_async_iterable_payload_write_then_cache() -> None:
+ """Test AsyncIterablePayload behavior when written before caching."""
+
+ async def gen() -> AsyncIterator[bytes]:
+ yield b"Hello"
+ yield b"World"
+
+ p = payload.AsyncIterablePayload(gen())
+
+ # First write without caching (streaming)
+ writer1 = MockStreamWriter()
+ await p.write_with_length(writer1, None)
+ assert writer1.get_written_bytes() == b"HelloWorld"
+ assert p._iter is None # Iterator exhausted
+ assert p._cached_chunks is None # No cache created
+ assert p._consumed is True # Marked as consumed
+
+ # Subsequent operations should handle exhausted iterator
+ result = await p.as_bytes()
+ assert result == b"" # Empty since iterator exhausted without cache
+
+ # Write should also be empty
+ writer2 = MockStreamWriter()
+ await p.write_with_length(writer2, None)
+ assert writer2.get_written_bytes() == b""
+
+
+async def test_bytes_payload_reusability() -> None:
+ """Test that BytesPayload can be written and read multiple times."""
+ data = b"test payload data"
+ p = payload.BytesPayload(data)
+
+ # First write_with_length
+ writer1 = MockStreamWriter()
+ await p.write_with_length(writer1, None)
+ assert writer1.get_written_bytes() == data
+
+ # Second write_with_length (simulating redirect)
+ writer2 = MockStreamWriter()
+ await p.write_with_length(writer2, None)
+ assert writer2.get_written_bytes() == data
+
+ # Write with partial length
+ writer3 = MockStreamWriter()
+ await p.write_with_length(writer3, 5)
+ assert writer3.get_written_bytes() == b"test "
+
+ # Test as_bytes multiple times
+ bytes1 = await p.as_bytes()
+ bytes2 = await p.as_bytes()
+ bytes3 = await p.as_bytes()
+ assert bytes1 == bytes2 == bytes3 == data
+
+
+async def test_string_payload_reusability() -> None:
+ """Test that StringPayload can be written and read multiple times."""
+ text = "test string data"
+ expected_bytes = text.encode("utf-8")
+ p = payload.StringPayload(text)
+
+ # First write_with_length
+ writer1 = MockStreamWriter()
+ await p.write_with_length(writer1, None)
+ assert writer1.get_written_bytes() == expected_bytes
+
+ # Second write_with_length (simulating redirect)
+ writer2 = MockStreamWriter()
+ await p.write_with_length(writer2, None)
+ assert writer2.get_written_bytes() == expected_bytes
+
+ # Write with partial length
+ writer3 = MockStreamWriter()
+ await p.write_with_length(writer3, 5)
+ assert writer3.get_written_bytes() == b"test "
+
+ # Test as_bytes multiple times
+ bytes1 = await p.as_bytes()
+ bytes2 = await p.as_bytes()
+ bytes3 = await p.as_bytes()
+ assert bytes1 == bytes2 == bytes3 == expected_bytes
+
+
+async def test_bytes_io_payload_reusability() -> None:
+ """Test that BytesIOPayload can be written and read multiple times."""
+ data = b"test bytesio payload"
+ bytes_io = io.BytesIO(data)
+ p = payload.BytesIOPayload(bytes_io)
+
+ # First write_with_length
+ writer1 = MockStreamWriter()
+ await p.write_with_length(writer1, None)
+ assert writer1.get_written_bytes() == data
+
+ # Second write_with_length (simulating redirect)
+ writer2 = MockStreamWriter()
+ await p.write_with_length(writer2, None)
+ assert writer2.get_written_bytes() == data
+
+ # Write with partial length
+ writer3 = MockStreamWriter()
+ await p.write_with_length(writer3, 5)
+ assert writer3.get_written_bytes() == b"test "
+
+ # Test as_bytes multiple times
+ bytes1 = await p.as_bytes()
+ bytes2 = await p.as_bytes()
+ bytes3 = await p.as_bytes()
+ assert bytes1 == bytes2 == bytes3 == data
+
+
+async def test_string_io_payload_reusability() -> None:
+ """Test that StringIOPayload can be written and read multiple times."""
+ text = "test stringio payload"
+ expected_bytes = text.encode("utf-8")
+ string_io = io.StringIO(text)
+ p = payload.StringIOPayload(string_io)
+
+ # Note: StringIOPayload reads all content in __init__ and becomes a StringPayload
+ # So it should be fully reusable
+
+ # First write_with_length
+ writer1 = MockStreamWriter()
+ await p.write_with_length(writer1, None)
+ assert writer1.get_written_bytes() == expected_bytes
+
+ # Second write_with_length (simulating redirect)
+ writer2 = MockStreamWriter()
+ await p.write_with_length(writer2, None)
+ assert writer2.get_written_bytes() == expected_bytes
+
+ # Write with partial length
+ writer3 = MockStreamWriter()
+ await p.write_with_length(writer3, 5)
+ assert writer3.get_written_bytes() == b"test "
+
+ # Test as_bytes multiple times
+ bytes1 = await p.as_bytes()
+ bytes2 = await p.as_bytes()
+ bytes3 = await p.as_bytes()
+ assert bytes1 == bytes2 == bytes3 == expected_bytes
+
+
+async def test_buffered_reader_payload_reusability() -> None:
+ """Test that BufferedReaderPayload can be written and read multiple times."""
+ data = b"test buffered reader payload"
+ buffer = io.BufferedReader(io.BytesIO(data)) # type: ignore[arg-type]
+ p = payload.BufferedReaderPayload(buffer)
+
+ # First write_with_length
+ writer1 = MockStreamWriter()
+ await p.write_with_length(writer1, None)
+ assert writer1.get_written_bytes() == data
+
+ # Second write_with_length (simulating redirect)
+ writer2 = MockStreamWriter()
+ await p.write_with_length(writer2, None)
+ assert writer2.get_written_bytes() == data
+
+ # Write with partial length
+ writer3 = MockStreamWriter()
+ await p.write_with_length(writer3, 5)
+ assert writer3.get_written_bytes() == b"test "
+
+ # Test as_bytes multiple times
+ bytes1 = await p.as_bytes()
+ bytes2 = await p.as_bytes()
+ bytes3 = await p.as_bytes()
+ assert bytes1 == bytes2 == bytes3 == data
+
+
+async def test_async_iterable_payload_reusability_with_cache() -> None:
+ """Test that AsyncIterablePayload can be reused when cached via as_bytes."""
+
+ async def gen() -> AsyncIterator[bytes]:
+ yield b"async "
+ yield b"iterable "
+ yield b"payload"
+
+ expected_data = b"async iterable payload"
+ p = payload.AsyncIterablePayload(gen())
+
+ # First call to as_bytes should cache the data
+ bytes1 = await p.as_bytes()
+ assert bytes1 == expected_data
+ assert p._cached_chunks is not None
+ assert p._iter is None # Iterator exhausted
+
+ # Subsequent as_bytes calls should use cache
+ bytes2 = await p.as_bytes()
+ bytes3 = await p.as_bytes()
+ assert bytes1 == bytes2 == bytes3 == expected_data
+
+ # Now writes should also use the cached data
+ writer1 = MockStreamWriter()
+ await p.write_with_length(writer1, None)
+ assert writer1.get_written_bytes() == expected_data
+
+ # Second write should also work
+ writer2 = MockStreamWriter()
+ await p.write_with_length(writer2, None)
+ assert writer2.get_written_bytes() == expected_data
+
+ # Write with partial length
+ writer3 = MockStreamWriter()
+ await p.write_with_length(writer3, 5)
+ assert writer3.get_written_bytes() == b"async"
+
+
+async def test_async_iterable_payload_no_reuse_without_cache() -> None:
+ """Test that AsyncIterablePayload cannot be reused without caching."""
+
+ async def gen() -> AsyncIterator[bytes]:
+ yield b"test "
+ yield b"data"
+
+ p = payload.AsyncIterablePayload(gen())
+
+ # First write exhausts the iterator
+ writer1 = MockStreamWriter()
+ await p.write_with_length(writer1, None)
+ assert writer1.get_written_bytes() == b"test data"
+ assert p._iter is None # Iterator exhausted
+ assert p._consumed is True
+
+ # Second write should produce empty result
+ writer2 = MockStreamWriter()
+ await p.write_with_length(writer2, None)
+ assert writer2.get_written_bytes() == b""
+
+
+async def test_bytes_io_payload_close_does_not_close_io() -> None:
+ """Test that BytesIOPayload close() does not close the underlying BytesIO."""
+ bytes_io = io.BytesIO(b"data")
+ bytes_io_payload = payload.BytesIOPayload(bytes_io)
+
+ # Close the payload
+ await bytes_io_payload.close()
+
+ # BytesIO should NOT be closed
+ assert not bytes_io.closed
+
+ # Can still write after close
+ writer = MockStreamWriter()
+ await bytes_io_payload.write_with_length(writer, None)
+ assert writer.get_written_bytes() == b"data"
+
+
+async def test_custom_payload_backwards_compat_as_bytes() -> None:
+ """Test backwards compatibility for custom Payload that only implements decode()."""
+
+ class LegacyPayload(payload.Payload):
+ """A custom payload that only implements decode() like old code might do."""
+
+ def __init__(self, data: str) -> None:
+ super().__init__(data, headers=CIMultiDict())
+ self._data = data
+
+ def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
+ """Custom decode implementation."""
+ return self._data
+
+ async def write(self, writer: AbstractStreamWriter) -> None:
+ """Write implementation which is a no-op for this test."""
+
+ # Create instance with test data
+ p = LegacyPayload("Hello, World!")
+
+ # Test that as_bytes() works even though it's not explicitly implemented
+ # The base class should call decode() and encode the result
+ result = await p.as_bytes()
+ assert result == b"Hello, World!"
+
+ # Test with different text
+ p2 = LegacyPayload("Test with special chars: café")
+ result_utf8 = await p2.as_bytes(encoding="utf-8")
+ assert result_utf8 == "Test with special chars: café".encode()
+
+ # Test that decode() still works as expected
+ assert p.decode() == "Hello, World!"
+ assert p2.decode() == "Test with special chars: café"
+
+
+async def test_custom_payload_with_encoding_backwards_compat() -> None:
+ """Test custom Payload with encoding set uses instance encoding for as_bytes()."""
+
+ class EncodedPayload(payload.Payload):
+ """A custom payload with specific encoding."""
+
+ def __init__(self, data: str, encoding: str) -> None:
+ super().__init__(data, headers=CIMultiDict(), encoding=encoding)
+ self._data = data
+
+ def decode(self, encoding: str = "utf-8", errors: str = "strict") -> str:
+ """Custom decode implementation."""
+ return self._data
+
+ async def write(self, writer: AbstractStreamWriter) -> None:
+ """Write implementation is a no-op."""
+
+ # Create instance with specific encoding
+ p = EncodedPayload("Test data", encoding="latin-1")
+
+ # as_bytes() should use the instance encoding (latin-1) not the default utf-8
+ result = await p.as_bytes()
+ assert result == b"Test data" # ASCII chars are same in latin-1
+
+ # Test with non-ASCII that differs between encodings
+ p2 = EncodedPayload("café", encoding="latin-1")
+ result_latin1 = await p2.as_bytes()
+ assert result_latin1 == "café".encode("latin-1")
+ assert result_latin1 != "café".encode() # Should be different bytes
+
+
+async def test_iobase_payload_close_idempotent() -> None:
+ """Test that IOBasePayload.close() is idempotent and covers the _consumed check."""
+ file_like = io.BytesIO(b"test data")
+ p = payload.IOBasePayload(file_like)
+
+ # First close should set _consumed to True
+ await p.close()
+ assert p._consumed is True
+
+ # Second close should be a no-op due to _consumed check (line 621)
+ await p.close()
+ assert p._consumed is True
+
+
+def test_iobase_payload_decode() -> None:
+ """Test IOBasePayload.decode() returns correct string."""
+ # Test with UTF-8 encoded text
+ text = "Hello, 世界! 🌍"
+ file_like = io.BytesIO(text.encode("utf-8"))
+ p = payload.IOBasePayload(file_like)
+
+ # decode() should return the original string
+ assert p.decode() == text
+
+ # Test with different encoding
+ latin1_text = "café"
+ file_like2 = io.BytesIO(latin1_text.encode("latin-1"))
+ p2 = payload.IOBasePayload(file_like2)
+ assert p2.decode("latin-1") == latin1_text
+
+ # Test that file position is restored
+ file_like3 = io.BytesIO(b"test data")
+ file_like3.read(4) # Move position forward
+ p3 = payload.IOBasePayload(file_like3)
+ # decode() should read from the stored start position (4)
+ assert p3.decode() == " data"
+
+
+def test_bytes_payload_size() -> None:
+ """Test BytesPayload.size property returns correct byte length."""
+ # Test with bytes
+ bp = payload.BytesPayload(b"Hello World")
+ assert bp.size == 11
+
+ # Test with empty bytes
+ bp_empty = payload.BytesPayload(b"")
+ assert bp_empty.size == 0
+
+ # Test with bytearray
+ ba = bytearray(b"Hello World")
+ bp_array = payload.BytesPayload(ba)
+ assert bp_array.size == 11
+
+
+def test_string_payload_size() -> None:
+ """Test StringPayload.size property with different encodings."""
+ # Test ASCII string with default UTF-8 encoding
+ sp = payload.StringPayload("Hello World")
+ assert sp.size == 11
+
+ # Test Unicode string with default UTF-8 encoding
+ unicode_str = "Hello 世界"
+ sp_unicode = payload.StringPayload(unicode_str)
+ assert sp_unicode.size == len(unicode_str.encode("utf-8"))
+
+ # Test with UTF-16 encoding
+ sp_utf16 = payload.StringPayload("Hello World", encoding="utf-16")
+ assert sp_utf16.size == len("Hello World".encode("utf-16"))
+
+ # Test with latin-1 encoding
+ sp_latin1 = payload.StringPayload("café", encoding="latin-1")
+ assert sp_latin1.size == len("café".encode("latin-1"))
+
+
+def test_string_io_payload_size() -> None:
+ """Test StringIOPayload.size property."""
+ # Test normal string
+ sio = StringIO("Hello World")
+ siop = payload.StringIOPayload(sio)
+ assert siop.size == 11
+
+ # Test Unicode string
+ sio_unicode = StringIO("Hello 世界")
+ siop_unicode = payload.StringIOPayload(sio_unicode)
+ assert siop_unicode.size == len("Hello 世界".encode())
+
+ # Test with custom encoding
+ sio_custom = StringIO("Hello")
+ siop_custom = payload.StringIOPayload(sio_custom, encoding="utf-16")
+ assert siop_custom.size == len("Hello".encode("utf-16"))
+
+ # Test with emoji to ensure correct byte count
+ sio_emoji = StringIO("Hello 👋🌍")
+ siop_emoji = payload.StringIOPayload(sio_emoji)
+ assert siop_emoji.size == len("Hello 👋🌍".encode())
+ # Verify it's not the string length
+ assert siop_emoji.size != len("Hello 👋🌍")
+
+
+def test_all_string_payloads_size_is_bytes() -> None:
+ """Test that all string-like payload classes report size in bytes, not string length."""
+ # Test string with multibyte characters
+ test_str = "Hello 👋 世界 🌍" # Contains emoji and Chinese characters
+
+ # StringPayload
+ sp = payload.StringPayload(test_str)
+ assert sp.size == len(test_str.encode("utf-8"))
+ assert sp.size != len(test_str) # Ensure it's not string length
+
+ # StringIOPayload
+ sio = StringIO(test_str)
+ siop = payload.StringIOPayload(sio)
+ assert siop.size == len(test_str.encode("utf-8"))
+ assert siop.size != len(test_str)
+
+ # Test with different encoding
+ sp_utf16 = payload.StringPayload(test_str, encoding="utf-16")
+ assert sp_utf16.size == len(test_str.encode("utf-16"))
+ assert sp_utf16.size != sp.size # Different encoding = different size
+
+ # JsonPayload (which extends BytesPayload)
+ json_data = {"message": test_str}
+ jp = payload.JsonPayload(json_data)
+ # JSON escapes Unicode, so we need to check the actual encoded size
+ json_str = json.dumps(json_data)
+ assert jp.size == len(json_str.encode("utf-8"))
+
+ # Test JsonPayload with ensure_ascii=False to get actual UTF-8 encoding
+ jp_utf8 = payload.JsonPayload(
+ json_data, dumps=lambda x: json.dumps(x, ensure_ascii=False)
+ )
+ json_str_utf8 = json.dumps(json_data, ensure_ascii=False)
+ assert jp_utf8.size == len(json_str_utf8.encode("utf-8"))
+ assert jp_utf8.size != len(
+ json_str_utf8
+ ) # Now it's different due to multibyte chars
+
+
+def test_bytes_io_payload_size() -> None:
+ """Test BytesIOPayload.size property."""
+ # Test normal bytes
+ bio = io.BytesIO(b"Hello World")
+ biop = payload.BytesIOPayload(bio)
+ assert biop.size == 11
+
+ # Test empty BytesIO
+ bio_empty = io.BytesIO(b"")
+ biop_empty = payload.BytesIOPayload(bio_empty)
+ assert biop_empty.size == 0
+
+ # Test with position not at start
+ bio_pos = io.BytesIO(b"Hello World")
+ bio_pos.seek(5)
+ biop_pos = payload.BytesIOPayload(bio_pos)
+ assert biop_pos.size == 6 # Size should be from position to end
+
+
+def test_json_payload_size() -> None:
+ """Test JsonPayload.size property."""
+ # Test simple dict
+ data = {"hello": "world"}
+ jp = payload.JsonPayload(data)
+ expected_json = json.dumps(data) # Use actual json.dumps output
+ assert jp.size == len(expected_json.encode("utf-8"))
+
+ # Test with Unicode
+ data_unicode = {"message": "Hello 世界"}
+ jp_unicode = payload.JsonPayload(data_unicode)
+ expected_unicode = json.dumps(data_unicode)
+ assert jp_unicode.size == len(expected_unicode.encode("utf-8"))
+
+ # Test with custom encoding
+ data_custom = {"test": "data"}
+ jp_custom = payload.JsonPayload(data_custom, encoding="utf-16")
+ expected_custom = json.dumps(data_custom)
+ assert jp_custom.size == len(expected_custom.encode("utf-16"))
+
+
+async def test_text_io_payload_size_matches_file_encoding(tmp_path: Path) -> None:
+ """Test TextIOPayload.size when file encoding matches payload encoding."""
+ # Create UTF-8 file
+ utf8_file = tmp_path / "test_utf8.txt"
+ content = "Hello 世界"
+
+ # Write file in executor
+ loop = asyncio.get_running_loop()
+ await loop.run_in_executor(None, utf8_file.write_text, content, "utf-8")
+
+ # Open file in executor
+ def open_file() -> TextIO:
+ return open(utf8_file, encoding="utf-8")
+
+ f = await loop.run_in_executor(None, open_file)
+ try:
+ tiop = payload.TextIOPayload(f)
+ # Size should match the actual UTF-8 encoded size
+ assert tiop.size == len(content.encode("utf-8"))
+ finally:
+ await loop.run_in_executor(None, f.close)
+
+
+async def test_text_io_payload_size_utf16(tmp_path: Path) -> None:
+ """Test TextIOPayload.size reports correct size with utf-16."""
+ # Create UTF-16 file
+ utf16_file = tmp_path / "test_utf16.txt"
+ content = "Hello World"
+
+ loop = asyncio.get_running_loop()
+ # Write file in executor
+ await loop.run_in_executor(None, utf16_file.write_text, content, "utf-16")
+
+ # Get file size in executor
+ utf16_file_size = await loop.run_in_executor(
+ None, lambda: utf16_file.stat().st_size
+ )
+
+ # Open file in executor
+ def open_file() -> TextIO:
+ return open(utf16_file, encoding="utf-16")
+
+ f = await loop.run_in_executor(None, open_file)
+ try:
+ tiop = payload.TextIOPayload(f, encoding="utf-16")
+ # Payload reports file size on disk (UTF-16)
+ assert tiop.size == utf16_file_size
+
+ # Write to a buffer to see what actually gets sent
+ writer = BufferWriter()
+ await tiop.write(writer)
+
+ # Check that the actual written bytes match file size
+ assert len(writer.buffer) == utf16_file_size
+ finally:
+ await loop.run_in_executor(None, f.close)
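
The tests above pin down the reuse contract: as_bytes() on an AsyncIterablePayload drains the generator once, caches the chunks, and serves every later read or write from that cache, while a plain streaming write consumes the payload for good. A minimal usage sketch of that contract, assuming only the public payload module these tests import (the gen() helper is illustrative, not part of the patch):

    import asyncio
    from typing import AsyncIterator

    from aiohttp import payload


    async def main() -> None:
        async def gen() -> AsyncIterator[bytes]:
            yield b"Hello"
            yield b" World"

        p = payload.AsyncIterablePayload(gen())
        first = await p.as_bytes()   # drains the generator and caches the chunks
        second = await p.as_bytes()  # served from the cache; the generator is gone
        assert first == second == b"Hello World"


    asyncio.run(main())
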
From 8658faad0f2ecb129ae33a3e0f3d38775c63911e Mon Sep 17 00:00:00 2001
From: Sam Bull
Date: Mon, 26 May 2025 13:51:27 +0100
Subject: [PATCH 03/13] Correct type of ClientRequest.body (#11011)
Co-authored-by: J. Nick Koston
---
aiohttp/client_middleware_digest_auth.py | 10 ++++++----
aiohttp/client_reqrep.py | 3 ++-
docs/client_reference.rst | 7 ++-----
tests/test_client_middleware_digest_auth.py | 6 +++---
tests/test_client_request.py | 16 ++++++++--------
5 files changed, 21 insertions(+), 21 deletions(-)
diff --git a/aiohttp/client_middleware_digest_auth.py b/aiohttp/client_middleware_digest_auth.py
index 9a8ffc18313..b2daf76e6bb 100644
--- a/aiohttp/client_middleware_digest_auth.py
+++ b/aiohttp/client_middleware_digest_auth.py
@@ -193,7 +193,9 @@ def __init__(
self._nonce_count = 0
self._challenge: DigestAuthChallenge = {}
- async def _encode(self, method: str, url: URL, body: Union[bytes, Payload]) -> str:
+ async def _encode(
+ self, method: str, url: URL, body: Union[Payload, Literal[b""]]
+ ) -> str:
"""
Build digest authorization header for the current challenge.
@@ -274,10 +276,10 @@ def KD(s: bytes, d: bytes) -> bytes:
A1 = b":".join((self._login_bytes, realm_bytes, self._password_bytes))
A2 = f"{method.upper()}:{path}".encode()
if qop == "auth-int":
- if isinstance(body, bytes): # will always be empty bytes unless Payload
- entity_bytes = body
- else:
+ if isinstance(body, Payload): # will always be empty bytes unless Payload
entity_bytes = await body.as_bytes() # Get bytes from Payload
+ else:
+ entity_bytes = body
entity_hash = H(entity_bytes)
A2 = b":".join((A2, entity_hash))
diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py
index 59a11be3764..618d2332647 100644
--- a/aiohttp/client_reqrep.py
+++ b/aiohttp/client_reqrep.py
@@ -17,6 +17,7 @@
Dict,
Iterable,
List,
+ Literal,
Mapping,
NamedTuple,
Optional,
@@ -393,7 +394,7 @@ def port(self) -> Optional[int]:
return self.url.port
@property
- def body(self) -> Union[bytes, payload.Payload]:
+ def body(self) -> Union[payload.Payload, Literal[b""]]:
"""Request body."""
# empty body is represented as bytes for backwards compatibility
return self._body or b""
diff --git a/docs/client_reference.rst b/docs/client_reference.rst
index b08df9c05ba..287eba0e89d 100644
--- a/docs/client_reference.rst
+++ b/docs/client_reference.rst
@@ -1866,12 +1866,9 @@ ClientRequest
For more information about using middleware, see :ref:`aiohttp-client-middleware`.
.. attribute:: body
- :type: Payload | FormData
+ :type: Payload | Literal[b""]
- The request body payload. This can be:
-
- - A :class:`Payload` object for raw data (default is empty bytes ``b""``)
- - A :class:`FormData` object for form submissions
+ The request body payload (defaults to ``b""`` if no body was passed).
.. danger::
diff --git a/tests/test_client_middleware_digest_auth.py b/tests/test_client_middleware_digest_auth.py
index 6da6850bafc..b649e0b601f 100644
--- a/tests/test_client_middleware_digest_auth.py
+++ b/tests/test_client_middleware_digest_auth.py
@@ -2,7 +2,7 @@
import io
from hashlib import md5, sha1
-from typing import Generator, Union
+from typing import Generator, Literal, Union
from unittest import mock
import pytest
@@ -270,7 +270,7 @@ def KD(secret: str, data: str) -> str:
@pytest.mark.parametrize(
("body", "body_str"),
[
- (b"this is a body", "this is a body"), # Bytes case
+ (b"", ""), # Bytes case
(
BytesIOPayload(io.BytesIO(b"this is a body")),
"this is a body",
@@ -280,7 +280,7 @@ def KD(secret: str, data: str) -> str:
async def test_digest_response_exact_match(
qop: str,
algorithm: str,
- body: Union[bytes, BytesIOPayload],
+ body: Union[Literal[b""], BytesIOPayload],
body_str: str,
mock_sha1_digest: mock.MagicMock,
) -> None:
diff --git a/tests/test_client_request.py b/tests/test_client_request.py
index 361163c87a0..74e23aeb4e1 100644
--- a/tests/test_client_request.py
+++ b/tests/test_client_request.py
@@ -1320,7 +1320,7 @@ async def test_oserror_on_write_bytes(
loop: asyncio.AbstractEventLoop, conn: mock.Mock
) -> None:
req = ClientRequest("POST", URL("http://python.org/"), loop=loop)
- req.body = b"test data"
+ req.body = b"test data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892
writer = WriterMock()
writer.write.side_effect = OSError
@@ -1668,7 +1668,7 @@ async def test_write_bytes_with_content_length_limit(
data = b"Hello World"
req = ClientRequest("post", URL("http://python.org/"), loop=loop)
- req.body = data
+ req.body = data # type: ignore[assignment] # https://github.com/python/mypy/issues/12892
writer = StreamWriter(protocol=conn.protocol, loop=loop)
# Use content_length=5 to truncate data
@@ -1705,7 +1705,7 @@ async def gen() -> AsyncIterator[bytes]:
req.body = gen() # type: ignore[assignment] # https://github.com/python/mypy/issues/12892
else:
- req.body = data
+ req.body = data # type: ignore[assignment] # https://github.com/python/mypy/issues/12892
writer = StreamWriter(protocol=conn.protocol, loop=loop)
# Use content_length=7 to truncate at the middle of Part2
@@ -1755,7 +1755,7 @@ async def test_warn_if_unclosed_payload_via_body_setter(
ResourceWarning,
match="The previous request body contains unclosed resources",
):
- req.body = b"new data"
+ req.body = b"new data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892
await req.close()
@@ -1773,7 +1773,7 @@ async def test_no_warn_for_autoclose_payload_via_body_setter(
# Setting body again should not trigger warning since previous payload has autoclose=True
with warnings.catch_warnings(record=True) as warning_list:
warnings.simplefilter("always")
- req.body = b"new data"
+ req.body = b"new data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892
# Filter out any non-ResourceWarning warnings
resource_warnings = [
@@ -1803,7 +1803,7 @@ async def test_no_warn_for_consumed_payload_via_body_setter(
# Setting body again should not trigger warning since previous payload is consumed
with warnings.catch_warnings(record=True) as warning_list:
warnings.simplefilter("always")
- req.body = b"new data"
+ req.body = b"new data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892
# Filter out any non-ResourceWarning warnings
resource_warnings = [
@@ -1922,7 +1922,7 @@ async def test_body_setter_closes_previous_payload(
req._body = mock_payload
# Update body with new data using setter
- req.body = b"new body data"
+ req.body = b"new body data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892
# Verify the previous payload was closed using _close
mock_payload._close.assert_called_once()
@@ -2051,7 +2051,7 @@ async def test_warn_stacklevel_points_to_user_code(
with warnings.catch_warnings(record=True) as warning_list:
warnings.simplefilter("always", ResourceWarning)
# This line should be reported as the warning source
- req.body = b"new data" # LINE TO BE REPORTED
+ req.body = b"new data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892 # LINE TO BE REPORTED
# Find the ResourceWarning
resource_warnings = [
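
For user code, the practical effect of the tightened annotation is that request.body is either a Payload or the literal b"" sentinel, so an isinstance() check narrows it before calling Payload-only methods, mirroring the digest-auth branch above. A minimal sketch under that assumption (entity_bytes is a hypothetical helper, not part of the patch):

    from aiohttp import payload
    from aiohttp.client_reqrep import ClientRequest


    async def entity_bytes(req: ClientRequest) -> bytes:
        # req.body is Union[payload.Payload, Literal[b""]] after this patch.
        if isinstance(req.body, payload.Payload):
            return await req.body.as_bytes()  # Payload branch
        return req.body  # the b"" empty-body sentinel
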
From faea0d64e7f477d69363d2a40ed7f5b319905912 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 26 May 2025 12:55:06 +0000
Subject: [PATCH 04/13] Bump cryptography from 45.0.2 to 45.0.3 (#11021)
Bumps [cryptography](https://github.com/pyca/cryptography) from 45.0.2
to 45.0.3.
Changelog, sourced from cryptography's changelog:

45.0.3 - 2025-05-25
* Fixed decrypting PKCS#8 files encrypted with long salts (this impacts
  keys encrypted by Bouncy Castle).
* Fixed decrypting PKCS#8 files encrypted with DES-CBC-MD5. While wildly
  insecure, this remains prevalent.
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
requirements/constraints.txt | 2 +-
requirements/dev.txt | 2 +-
requirements/lint.txt | 2 +-
requirements/test.txt | 2 +-
4 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index 052df257fe8..6fbf2c61a5a 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -56,7 +56,7 @@ coverage==7.8.2
# via
# -r requirements/test.in
# pytest-cov
-cryptography==45.0.2
+cryptography==45.0.3
# via
# pyjwt
# trustme
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 1f00948a5d0..78a718c65be 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -56,7 +56,7 @@ coverage==7.8.2
# via
# -r requirements/test.in
# pytest-cov
-cryptography==45.0.2
+cryptography==45.0.3
# via
# pyjwt
# trustme
diff --git a/requirements/lint.txt b/requirements/lint.txt
index 77df1c8ade8..a2917ec48e0 100644
--- a/requirements/lint.txt
+++ b/requirements/lint.txt
@@ -21,7 +21,7 @@ cfgv==3.4.0
# via pre-commit
click==8.1.8
# via slotscheck
-cryptography==45.0.2
+cryptography==45.0.3
# via trustme
distlib==0.3.9
# via virtualenv
diff --git a/requirements/test.txt b/requirements/test.txt
index db685044f69..fa449a9de51 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -29,7 +29,7 @@ coverage==7.8.2
# via
# -r requirements/test.in
# pytest-cov
-cryptography==45.0.2
+cryptography==45.0.3
# via trustme
exceptiongroup==1.3.0
# via pytest
From 852297cf0e3d57b855834d183bd7d87e38f9c8f2 Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Mon, 26 May 2025 08:03:04 -0500
Subject: [PATCH 05/13] Cleanup some type ignores in the client request tests
(#11020)
---
tests/test_client_request.py | 10 +++++++---
1 file changed, 7 insertions(+), 3 deletions(-)
diff --git a/tests/test_client_request.py b/tests/test_client_request.py
index 74e23aeb4e1..f736bd0e224 100644
--- a/tests/test_client_request.py
+++ b/tests/test_client_request.py
@@ -37,6 +37,7 @@
from aiohttp.compression_utils import ZLibBackend
from aiohttp.connector import Connection
from aiohttp.http import HttpVersion10, HttpVersion11, StreamWriter
+from aiohttp.multipart import MultipartWriter
from aiohttp.typedefs import LooseCookies
@@ -757,7 +758,8 @@ async def test_formdata_boundary_from_headers(
)
async with await req.send(conn):
await asyncio.sleep(0)
- assert req.body._boundary == boundary.encode() # type: ignore[union-attr]
+ assert isinstance(req.body, MultipartWriter)
+ assert req.body._boundary == boundary.encode()
async def test_post_data(loop: asyncio.AbstractEventLoop, conn: mock.Mock) -> None:
@@ -767,7 +769,8 @@ async def test_post_data(loop: asyncio.AbstractEventLoop, conn: mock.Mock) -> No
)
resp = await req.send(conn)
assert "/" == req.url.path
- assert b"life=42" == req.body._value # type: ignore[union-attr]
+ assert isinstance(req.body, payload.Payload)
+ assert b"life=42" == req.body._value
assert "application/x-www-form-urlencoded" == req.headers["CONTENT-TYPE"]
await req.close()
resp.close()
@@ -806,7 +809,8 @@ async def test_get_with_data(loop: asyncio.AbstractEventLoop) -> None:
meth, URL("http://python.org/"), data={"life": "42"}, loop=loop
)
assert "/" == req.url.path
- assert b"life=42" == req.body._value # type: ignore[union-attr]
+ assert isinstance(req.body, payload.Payload)
+ assert b"life=42" == req.body._value
await req.close()
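
The cleanup swaps "# type: ignore[union-attr]" comments for runtime assertions that double as type narrowers: after assert isinstance(...), mypy treats req.body as the concrete class, and the test fails loudly if the payload type ever changes. A condensed sketch of the pattern as used above (assert_form_body is a hypothetical helper wrapping the test's assertions):

    from aiohttp import payload
    from aiohttp.client_reqrep import ClientRequest


    def assert_form_body(req: ClientRequest) -> None:
        # Narrow instead of ignoring: the assert is checked at runtime and
        # lets mypy type the private-attribute access on the next line.
        assert isinstance(req.body, payload.Payload)
        assert req.body._value == b"life=42"
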
From 6d13ccca04fbc8962fb399a5549e86464593e9fc Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Mon, 26 May 2025 08:16:03 -0500
Subject: [PATCH 06/13] [PR #11011/8658faad backport][3.12] Correct type of
ClientRequest.body (#11023)
Co-authored-by: Sam Bull
Co-authored-by: J. Nick Koston
---
aiohttp/client_middleware_digest_auth.py | 10 ++++++----
aiohttp/client_reqrep.py | 3 ++-
docs/client_reference.rst | 7 ++-----
tests/test_client_middleware_digest_auth.py | 6 +++---
tests/test_client_request.py | 16 ++++++++--------
5 files changed, 21 insertions(+), 21 deletions(-)
diff --git a/aiohttp/client_middleware_digest_auth.py b/aiohttp/client_middleware_digest_auth.py
index 9a8ffc18313..b2daf76e6bb 100644
--- a/aiohttp/client_middleware_digest_auth.py
+++ b/aiohttp/client_middleware_digest_auth.py
@@ -193,7 +193,9 @@ def __init__(
self._nonce_count = 0
self._challenge: DigestAuthChallenge = {}
- async def _encode(self, method: str, url: URL, body: Union[bytes, Payload]) -> str:
+ async def _encode(
+ self, method: str, url: URL, body: Union[Payload, Literal[b""]]
+ ) -> str:
"""
Build digest authorization header for the current challenge.
@@ -274,10 +276,10 @@ def KD(s: bytes, d: bytes) -> bytes:
A1 = b":".join((self._login_bytes, realm_bytes, self._password_bytes))
A2 = f"{method.upper()}:{path}".encode()
if qop == "auth-int":
- if isinstance(body, bytes): # will always be empty bytes unless Payload
- entity_bytes = body
- else:
+ if isinstance(body, Payload): # will always be empty bytes unless Payload
entity_bytes = await body.as_bytes() # Get bytes from Payload
+ else:
+ entity_bytes = body
entity_hash = H(entity_bytes)
A2 = b":".join((A2, entity_hash))
diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py
index 2322a1d7472..614751a17bd 100644
--- a/aiohttp/client_reqrep.py
+++ b/aiohttp/client_reqrep.py
@@ -17,6 +17,7 @@
Dict,
Iterable,
List,
+ Literal,
Mapping,
NamedTuple,
Optional,
@@ -459,7 +460,7 @@ def port(self) -> Optional[int]:
return self.url.port
@property
- def body(self) -> Union[bytes, payload.Payload]:
+ def body(self) -> Union[payload.Payload, Literal[b""]]:
"""Request body."""
# empty body is represented as bytes for backwards compatibility
return self._body or b""
diff --git a/docs/client_reference.rst b/docs/client_reference.rst
index d3c2226aee0..40fd7cdb276 100644
--- a/docs/client_reference.rst
+++ b/docs/client_reference.rst
@@ -1882,12 +1882,9 @@ ClientRequest
For more information about using middleware, see :ref:`aiohttp-client-middleware`.
.. attribute:: body
- :type: Payload | FormData
+ :type: Payload | Literal[b""]
- The request body payload. This can be:
-
- - A :class:`Payload` object for raw data (default is empty bytes ``b""``)
- - A :class:`FormData` object for form submissions
+ The request body payload (defaults to ``b""`` if no body was passed).
.. danger::
diff --git a/tests/test_client_middleware_digest_auth.py b/tests/test_client_middleware_digest_auth.py
index 6da6850bafc..b649e0b601f 100644
--- a/tests/test_client_middleware_digest_auth.py
+++ b/tests/test_client_middleware_digest_auth.py
@@ -2,7 +2,7 @@
import io
from hashlib import md5, sha1
-from typing import Generator, Union
+from typing import Generator, Literal, Union
from unittest import mock
import pytest
@@ -270,7 +270,7 @@ def KD(secret: str, data: str) -> str:
@pytest.mark.parametrize(
("body", "body_str"),
[
- (b"this is a body", "this is a body"), # Bytes case
+ (b"", ""), # Bytes case
(
BytesIOPayload(io.BytesIO(b"this is a body")),
"this is a body",
@@ -280,7 +280,7 @@ def KD(secret: str, data: str) -> str:
async def test_digest_response_exact_match(
qop: str,
algorithm: str,
- body: Union[bytes, BytesIOPayload],
+ body: Union[Literal[b""], BytesIOPayload],
body_str: str,
mock_sha1_digest: mock.MagicMock,
) -> None:
diff --git a/tests/test_client_request.py b/tests/test_client_request.py
index b1807b96d82..f880bb0859f 100644
--- a/tests/test_client_request.py
+++ b/tests/test_client_request.py
@@ -1261,7 +1261,7 @@ def read(self, decode=False):
async def test_oserror_on_write_bytes(loop, conn) -> None:
req = ClientRequest("POST", URL("http://python.org/"), loop=loop)
- req.body = b"test data"
+ req.body = b"test data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892
writer = WriterMock()
writer.write.side_effect = OSError
@@ -1618,7 +1618,7 @@ async def test_write_bytes_with_content_length_limit(
data = b"Hello World"
req = ClientRequest("post", URL("http://python.org/"), loop=loop)
- req.body = data
+ req.body = data # type: ignore[assignment] # https://github.com/python/mypy/issues/12892
writer = StreamWriter(protocol=conn.protocol, loop=loop)
# Use content_length=5 to truncate data
@@ -1655,7 +1655,7 @@ async def gen() -> AsyncIterator[bytes]:
req.body = gen() # type: ignore[assignment] # https://github.com/python/mypy/issues/12892
else:
- req.body = data
+ req.body = data # type: ignore[assignment] # https://github.com/python/mypy/issues/12892
writer = StreamWriter(protocol=conn.protocol, loop=loop)
# Use content_length=7 to truncate at the middle of Part2
@@ -1705,7 +1705,7 @@ async def test_warn_if_unclosed_payload_via_body_setter(
ResourceWarning,
match="The previous request body contains unclosed resources",
):
- req.body = b"new data"
+ req.body = b"new data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892
await req.close()
@@ -1723,7 +1723,7 @@ async def test_no_warn_for_autoclose_payload_via_body_setter(
# Setting body again should not trigger warning since previous payload has autoclose=True
with warnings.catch_warnings(record=True) as warning_list:
warnings.simplefilter("always")
- req.body = b"new data"
+ req.body = b"new data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892
# Filter out any non-ResourceWarning warnings
resource_warnings = [
@@ -1753,7 +1753,7 @@ async def test_no_warn_for_consumed_payload_via_body_setter(
# Setting body again should not trigger warning since previous payload is consumed
with warnings.catch_warnings(record=True) as warning_list:
warnings.simplefilter("always")
- req.body = b"new data"
+ req.body = b"new data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892
# Filter out any non-ResourceWarning warnings
resource_warnings = [
@@ -1872,7 +1872,7 @@ async def test_body_setter_closes_previous_payload(
req._body = mock_payload
# Update body with new data using setter
- req.body = b"new body data"
+ req.body = b"new body data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892
# Verify the previous payload was closed using _close
mock_payload._close.assert_called_once()
@@ -2001,7 +2001,7 @@ async def test_warn_stacklevel_points_to_user_code(
with warnings.catch_warnings(record=True) as warning_list:
warnings.simplefilter("always", ResourceWarning)
# This line should be reported as the warning source
- req.body = b"new data" # LINE TO BE REPORTED
+ req.body = b"new data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892 # LINE TO BE REPORTED
# Find the ResourceWarning
resource_warnings = [
From 9b671b2b37cd4385a3c88faaf1bb25ec0697f77f Mon Sep 17 00:00:00 2001
From: Sam Bull
Date: Mon, 26 May 2025 14:34:44 +0100
Subject: [PATCH 07/13] Move ClientResponse to top of file (#11025)
No code changes
---
aiohttp/client_reqrep.py | 2042 +++++++++++++++++++-------------------
1 file changed, 1020 insertions(+), 1022 deletions(-)
diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py
index 618d2332647..6823da38346 100644
--- a/aiohttp/client_reqrep.py
+++ b/aiohttp/client_reqrep.py
@@ -96,6 +96,7 @@
from .tracing import Trace
+_CONNECTION_CLOSED_EXCEPTION = ClientConnectionError("Connection closed")
_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
@@ -210,1105 +211,1208 @@ def _warn_if_unclosed_payload(payload: payload.Payload, stacklevel: int = 2) ->
)
-class ClientRequest:
- GET_METHODS = {
- hdrs.METH_GET,
- hdrs.METH_HEAD,
- hdrs.METH_OPTIONS,
- hdrs.METH_TRACE,
- }
- POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
- ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE})
-
- DEFAULT_HEADERS = {
- hdrs.ACCEPT: "*/*",
- hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(),
- }
-
- # Type of body depends on PAYLOAD_REGISTRY, which is dynamic.
- _body: Union[None, payload.Payload] = None
- auth = None
- response = None
+class ClientResponse(HeadersMixin):
+ # Some of these attributes are None when created,
+ # but will be set by the start() method.
+ # As the end user will likely never see the None values, we cheat the types below.
+ # from the Status-Line of the response
+ version: Optional[HttpVersion] = None # HTTP-Version
+ status: int = None # type: ignore[assignment] # Status-Code
+ reason: Optional[str] = None # Reason-Phrase
- # These class defaults help create_autospec() work correctly.
- # If autospec is improved in future, maybe these can be removed.
- url = URL()
- method = "GET"
+ content: StreamReader = None # type: ignore[assignment] # Payload stream
+ _body: Optional[bytes] = None
+ _headers: CIMultiDictProxy[str] = None # type: ignore[assignment]
+ _history: Tuple["ClientResponse", ...] = ()
+ _raw_headers: RawHeaders = None # type: ignore[assignment]
- __writer: Optional["asyncio.Task[None]"] = None # async task for streaming data
- _continue = None # waiter future for '100 Continue' response
+ _connection: Optional["Connection"] = None # current connection
+ _cookies: Optional[SimpleCookie] = None
+ _continue: Optional["asyncio.Future[bool]"] = None
+ _source_traceback: Optional[traceback.StackSummary] = None
+ _session: Optional["ClientSession"] = None
+ # set up by ClientRequest after ClientResponse object creation
+ # post-init stage allows to not change ctor signature
+ _closed = True # to allow __del__ for non-initialized properly response
+ _released = False
+ _in_context = False
- _skip_auto_headers: Optional["CIMultiDict[None]"] = None
+ _resolve_charset: Callable[["ClientResponse", bytes], str] = lambda *_: "utf-8"
- # N.B.
- # Adding __del__ method with self._writer closing doesn't make sense
- # because _writer is instance method, thus it keeps a reference to self.
- # Until writer has finished finalizer will not be called.
+ __writer: Optional["asyncio.Task[None]"] = None
def __init__(
self,
method: str,
url: URL,
*,
- params: Query = None,
- headers: Optional[LooseHeaders] = None,
- skip_auto_headers: Optional[Iterable[str]] = None,
- data: Any = None,
- cookies: Optional[LooseCookies] = None,
- auth: Optional[BasicAuth] = None,
- version: http.HttpVersion = http.HttpVersion11,
- compress: Union[str, bool] = False,
- chunked: Optional[bool] = None,
- expect100: bool = False,
+ writer: "Optional[asyncio.Task[None]]",
+ continue100: Optional["asyncio.Future[bool]"],
+ timer: Optional[BaseTimerContext],
+ request_info: RequestInfo,
+ traces: List["Trace"],
loop: asyncio.AbstractEventLoop,
- response_class: Optional[Type["ClientResponse"]] = None,
- proxy: Optional[URL] = None,
- proxy_auth: Optional[BasicAuth] = None,
- timer: Optional[BaseTimerContext] = None,
- session: Optional["ClientSession"] = None,
- ssl: Union[SSLContext, bool, Fingerprint] = True,
- proxy_headers: Optional[LooseHeaders] = None,
- traces: Optional[List["Trace"]] = None,
- trust_env: bool = False,
- server_hostname: Optional[str] = None,
- ):
- if match := _CONTAINS_CONTROL_CHAR_RE.search(method):
- raise ValueError(
- f"Method cannot contain non-token characters {method!r} "
- f"(found at least {match.group()!r})"
- )
+ session: "ClientSession",
+ ) -> None:
# URL forbids subclasses, so a simple type check is enough.
- assert type(url) is URL, url
- if proxy is not None:
- assert type(proxy) is URL, proxy
- # FIXME: session is None in tests only, need to fix tests
- # assert session is not None
- if TYPE_CHECKING:
- assert session is not None
- self._session = session
- if params:
- url = url.extend_query(params)
- self.original_url = url
- self.url = url.with_fragment(None) if url.raw_fragment else url
- self.method = method.upper()
- self.chunked = chunked
- self.loop = loop
- self.length = None
- if response_class is None:
- real_response_class = ClientResponse
- else:
- real_response_class = response_class
- self.response_class: Type[ClientResponse] = real_response_class
- self._timer = timer if timer is not None else TimerNoop()
- self._ssl = ssl
- self.server_hostname = server_hostname
+ assert type(url) is URL
+
+ self.method = method
+ self._real_url = url
+ self._url = url.with_fragment(None) if url.raw_fragment else url
+ if writer is not None:
+ self._writer = writer
+ if continue100 is not None:
+ self._continue = continue100
+ self._request_info = request_info
+ self._timer = timer if timer is not None else TimerNoop()
+ self._cache: Dict[str, Any] = {}
+ self._traces = traces
+ self._loop = loop
+ # Save reference to _resolve_charset, so that get_encoding() will still
+ # work after the response has finished reading the body.
+ # TODO: Fix session=None in tests (see ClientRequest.__init__).
+ if session is not None:
+ # store a reference to session #1985
+ self._session = session
+ self._resolve_charset = session._resolve_charset
if loop.get_debug():
self._source_traceback = traceback.extract_stack(sys._getframe(1))
- self.update_version(version)
- self.update_host(url)
- self.update_headers(headers)
- self.update_auto_headers(skip_auto_headers)
- self.update_cookies(cookies)
- self.update_content_encoding(data, compress)
- self.update_auth(auth, trust_env)
- self.update_proxy(proxy, proxy_auth, proxy_headers)
-
- self.update_body_from_data(data)
- if data is not None or self.method not in self.GET_METHODS:
- self.update_transfer_encoding()
- self.update_expect_continue(expect100)
- self._traces = [] if traces is None else traces
-
def __reset_writer(self, _: object = None) -> None:
self.__writer = None
- def _get_content_length(self) -> Optional[int]:
- """Extract and validate Content-Length header value.
-
- Returns parsed Content-Length value or None if not set.
- Raises ValueError if header exists but cannot be parsed as an integer.
- """
- if hdrs.CONTENT_LENGTH not in self.headers:
- return None
-
- content_length_hdr = self.headers[hdrs.CONTENT_LENGTH]
- try:
- return int(content_length_hdr)
- except ValueError:
- raise ValueError(
- f"Invalid Content-Length header: {content_length_hdr}"
- ) from None
-
- @property
- def skip_auto_headers(self) -> CIMultiDict[None]:
- return self._skip_auto_headers or CIMultiDict()
-
@property
def _writer(self) -> Optional["asyncio.Task[None]"]:
+ """The writer task for streaming data.
+
+ _writer is only provided for backwards compatibility
+ for subclasses that may need to access it.
+ """
return self.__writer
@_writer.setter
- def _writer(self, writer: "asyncio.Task[None]") -> None:
+ def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None:
+ """Set the writer task for streaming data."""
if self.__writer is not None:
self.__writer.remove_done_callback(self.__reset_writer)
self.__writer = writer
- writer.add_done_callback(self.__reset_writer)
-
- def is_ssl(self) -> bool:
- return self.url.scheme in _SSL_SCHEMES
+ if writer is None:
+ return
+ if writer.done():
+ # The writer is already done, so we can clear it immediately.
+ self.__writer = None
+ else:
+ writer.add_done_callback(self.__reset_writer)
@property
- def ssl(self) -> Union["SSLContext", bool, Fingerprint]:
- return self._ssl
+ def cookies(self) -> SimpleCookie:
+ if self._cookies is None:
+ self._cookies = SimpleCookie()
+ return self._cookies
- @property
- def connection_key(self) -> ConnectionKey: # type: ignore[misc]
- if proxy_headers := self.proxy_headers:
- h: Optional[int] = hash(tuple(proxy_headers.items()))
- else:
- h = None
- url = self.url
- return tuple.__new__(
- ConnectionKey,
- (
- url.raw_host or "",
- url.port,
- url.scheme in _SSL_SCHEMES,
- self._ssl,
- self.proxy,
- self.proxy_auth,
- h,
- ),
- )
+ @cookies.setter
+ def cookies(self, cookies: SimpleCookie) -> None:
+ self._cookies = cookies
- @property
- def host(self) -> str:
- ret = self.url.raw_host
- assert ret is not None
- return ret
+ @reify
+ def url(self) -> URL:
+ return self._url
- @property
- def port(self) -> Optional[int]:
- return self.url.port
+ @reify
+ def real_url(self) -> URL:
+ return self._real_url
- @property
- def body(self) -> Union[payload.Payload, Literal[b""]]:
- """Request body."""
- # empty body is represented as bytes for backwards compatibility
- return self._body or b""
+ @reify
+ def host(self) -> str:
+ assert self._url.host is not None
+ return self._url.host
- @body.setter
- def body(self, value: Any) -> None:
- """Set request body with warning for non-autoclose payloads.
+ @reify
+ def headers(self) -> "CIMultiDictProxy[str]":
+ return self._headers
- WARNING: This setter must be called from within an event loop and is not
- thread-safe. Setting body outside of an event loop may raise RuntimeError
- when closing file-based payloads.
+ @reify
+ def raw_headers(self) -> RawHeaders:
+ return self._raw_headers
- DEPRECATED: Direct assignment to body is deprecated and will be removed
- in a future version. Use await update_body() instead for proper resource
- management.
- """
- # Close existing payload if present
- if self._body is not None:
- # Warn if the payload needs manual closing
- # stacklevel=3: user code -> body setter -> _warn_if_unclosed_payload
- _warn_if_unclosed_payload(self._body, stacklevel=3)
- # NOTE: In the future, when we remove sync close support,
- # this setter will need to be removed and only the async
- # update_body() method will be available. For now, we call
- # _close() for backwards compatibility.
- self._body._close()
- self._update_body(value)
-
- @property
+ @reify
def request_info(self) -> RequestInfo:
- headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers)
- # These are created on every request, so we use a NamedTuple
- # for performance reasons. We don't use the RequestInfo.__new__
- # method because it has a different signature which is provided
- # for backwards compatibility only.
- return tuple.__new__(
- RequestInfo, (self.url, self.method, headers, self.original_url)
- )
+ return self._request_info
- @property
- def session(self) -> "ClientSession":
- """Return the ClientSession instance.
+ @reify
+ def content_disposition(self) -> Optional[ContentDisposition]:
+ raw = self._headers.get(hdrs.CONTENT_DISPOSITION)
+ if raw is None:
+ return None
+ disposition_type, params_dct = multipart.parse_content_disposition(raw)
+ params = MappingProxyType(params_dct)
+ filename = multipart.content_disposition_filename(params)
+ return ContentDisposition(disposition_type, params, filename)
- This property provides access to the ClientSession that initiated
- this request, allowing middleware to make additional requests
- using the same session.
- """
- return self._session
+ def __del__(self, _warnings: Any = warnings) -> None:
+ if self._closed:
+ return
- def update_host(self, url: URL) -> None:
- """Update destination host, port and connection type (ssl)."""
- # get host/port
- if not url.raw_host:
- raise InvalidURL(url)
+ if self._connection is not None:
+ self._connection.release()
+ self._cleanup_writer()
- # basic auth info
- if url.raw_user or url.raw_password:
- self.auth = helpers.BasicAuth(url.user or "", url.password or "")
+ if self._loop.get_debug():
+ _warnings.warn(
+ f"Unclosed response {self!r}", ResourceWarning, source=self
+ )
+ context = {"client_response": self, "message": "Unclosed response"}
+ if self._source_traceback:
+ context["source_traceback"] = self._source_traceback
+ self._loop.call_exception_handler(context)
- def update_version(self, version: Union[http.HttpVersion, str]) -> None:
- """Convert request version to two elements tuple.
+ def __repr__(self) -> str:
+ out = io.StringIO()
+ ascii_encodable_url = str(self.url)
+ if self.reason:
+ ascii_encodable_reason = self.reason.encode(
+ "ascii", "backslashreplace"
+ ).decode("ascii")
+ else:
+ ascii_encodable_reason = "None"
+ print(
+ "".format(
+ ascii_encodable_url, self.status, ascii_encodable_reason
+ ),
+ file=out,
+ )
+ print(self.headers, file=out)
+ return out.getvalue()
- parser HTTP version '1.1' => (1, 1)
- """
- if isinstance(version, str):
- v = [part.strip() for part in version.split(".", 1)]
- try:
- version = http.HttpVersion(int(v[0]), int(v[1]))
- except ValueError:
- raise ValueError(
- f"Can not parse http version number: {version}"
- ) from None
- self.version = version
+ @property
+ def connection(self) -> Optional["Connection"]:
+ return self._connection
- def update_headers(self, headers: Optional[LooseHeaders]) -> None:
- """Update request headers."""
- self.headers: CIMultiDict[str] = CIMultiDict()
+ @reify
+ def history(self) -> Tuple["ClientResponse", ...]:
+ """A sequence of responses, if redirects occurred."""
+ return self._history
- # Build the host header
- host = self.url.host_port_subcomponent
+ @reify
+ def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":
+ links_str = ", ".join(self.headers.getall("link", []))
- # host_port_subcomponent is None when the URL is a relative URL.
- # but we know we do not have a relative URL here.
- assert host is not None
- self.headers[hdrs.HOST] = host
+ if not links_str:
+ return MultiDictProxy(MultiDict())
- if not headers:
- return
+ links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict()
- if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
- headers = headers.items()
+ for val in re.split(r",(?=\s*<)", links_str):
+ match = re.match(r"\s*<(.*)>(.*)", val)
+ if match is None: # Malformed link
+ continue
+ url, params_str = match.groups()
+ params = params_str.split(";")[1:]
- for key, value in headers: # type: ignore[misc]
- # A special case for Host header
- if key in hdrs.HOST_ALL:
- self.headers[key] = value
- else:
- self.headers.add(key, value)
+ link: MultiDict[Union[str, URL]] = MultiDict()
- def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None:
- if skip_auto_headers is not None:
- self._skip_auto_headers = CIMultiDict(
- (hdr, None) for hdr in sorted(skip_auto_headers)
- )
- used_headers = self.headers.copy()
- used_headers.extend(self._skip_auto_headers) # type: ignore[arg-type]
- else:
- # Fast path when there are no headers to skip
- # which is the most common case.
- used_headers = self.headers
+ for param in params:
+ match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
+ if match is None: # Malformed param
+ continue
+ key, _, value, _ = match.groups()
- for hdr, val in self.DEFAULT_HEADERS.items():
- if hdr not in used_headers:
- self.headers[hdr] = val
+ link.add(key, value)
- if hdrs.USER_AGENT not in used_headers:
- self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE
+ key = link.get("rel", url)
- def update_cookies(self, cookies: Optional[LooseCookies]) -> None:
- """Update request cookies header."""
- if not cookies:
- return
+ link.add("url", self.url.join(URL(url)))
- c = SimpleCookie()
- if hdrs.COOKIE in self.headers:
- c.load(self.headers.get(hdrs.COOKIE, ""))
- del self.headers[hdrs.COOKIE]
+ links.add(str(key), MultiDictProxy(link))
- if isinstance(cookies, Mapping):
- iter_cookies = cookies.items()
- else:
- iter_cookies = cookies # type: ignore[assignment]
- for name, value in iter_cookies:
- if isinstance(value, Morsel):
- # Preserve coded_value
- mrsl_val = value.get(value.key, Morsel())
- mrsl_val.set(value.key, value.value, value.coded_value)
- c[name] = mrsl_val
- else:
- c[name] = value # type: ignore[assignment]
+ return MultiDictProxy(links)
- self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip()
+ async def start(self, connection: "Connection") -> "ClientResponse":
+ """Start response processing."""
+ self._closed = False
+ self._protocol = connection.protocol
+ self._connection = connection
- def update_content_encoding(self, data: Any, compress: Union[bool, str]) -> None:
- """Set request content encoding."""
- self.compress = None
- if not data:
- return
+ with self._timer:
+ while True:
+ # read response
+ try:
+ protocol = self._protocol
+ message, payload = await protocol.read() # type: ignore[union-attr]
+ except http.HttpProcessingError as exc:
+ raise ClientResponseError(
+ self.request_info,
+ self.history,
+ status=exc.code,
+ message=exc.message,
+ headers=exc.headers,
+ ) from exc
- if self.headers.get(hdrs.CONTENT_ENCODING):
- if compress:
- raise ValueError(
- "compress can not be set if Content-Encoding header is set"
- )
- elif compress:
- self.compress = compress if isinstance(compress, str) else "deflate"
- self.headers[hdrs.CONTENT_ENCODING] = self.compress
- self.chunked = True # enable chunked, no need to deal with length
+ if message.code < 100 or message.code > 199 or message.code == 101:
+ break
- def update_transfer_encoding(self) -> None:
- """Analyze transfer-encoding header."""
- te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower()
+ if self._continue is not None:
+ set_result(self._continue, True)
+ self._continue = None
- if "chunked" in te:
- if self.chunked:
- raise ValueError(
- "chunked can not be set "
- 'if "Transfer-Encoding: chunked" header is set'
- )
+ # payload eof handler
+ payload.on_eof(self._response_eof)
- elif self.chunked:
- if hdrs.CONTENT_LENGTH in self.headers:
- raise ValueError(
- "chunked can not be set if Content-Length header is set"
- )
+ # response status
+ self.version = message.version
+ self.status = message.code
+ self.reason = message.reason
- self.headers[hdrs.TRANSFER_ENCODING] = "chunked"
- elif (
- self._body is not None
- and hdrs.CONTENT_LENGTH not in self.headers
- and (size := self._body.size) is not None
- ):
- self.headers[hdrs.CONTENT_LENGTH] = str(size)
+ # headers
+ self._headers = message.headers # type is CIMultiDictProxy
+ self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes]
- def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None:
- """Set basic auth."""
- if auth is None:
- auth = self.auth
- if auth is None and trust_env and self.url.host is not None:
- netrc_obj = netrc_from_env()
- with contextlib.suppress(LookupError):
- auth = basicauth_from_netrc(netrc_obj, self.url.host)
- if auth is None:
- return
-
- if not isinstance(auth, helpers.BasicAuth):
- raise TypeError("BasicAuth() tuple is required instead")
+ # payload
+ self.content = payload
- self.headers[hdrs.AUTHORIZATION] = auth.encode()
+ # cookies
+ if cookie_hdrs := self.headers.getall(hdrs.SET_COOKIE, ()):
+ cookies = SimpleCookie()
+ for hdr in cookie_hdrs:
+ try:
+ cookies.load(hdr)
+ except CookieError as exc:
+ client_logger.warning("Can not load response cookies: %s", exc)
+ self._cookies = cookies
+ return self
- def update_body_from_data(self, body: Any, _stacklevel: int = 3) -> None:
- """Update request body from data."""
- if self._body is not None:
- _warn_if_unclosed_payload(self._body, stacklevel=_stacklevel)
+ def _response_eof(self) -> None:
+ if self._closed:
+ return
- if body is None:
- self._body = None
+ # protocol could be None because connection could be detached
+ protocol = self._connection and self._connection.protocol
+ if protocol is not None and protocol.upgraded:
return
- # FormData
- maybe_payload = body() if isinstance(body, FormData) else body
+ self._closed = True
+ self._cleanup_writer()
+ self._release_connection()
- try:
- body_payload = payload.PAYLOAD_REGISTRY.get(maybe_payload, disposition=None)
- except payload.LookupError:
- boundary: Optional[str] = None
- if CONTENT_TYPE in self.headers:
- boundary = parse_mimetype(self.headers[CONTENT_TYPE]).parameters.get(
- "boundary"
- )
- body_payload = FormData(maybe_payload, boundary=boundary)() # type: ignore[arg-type]
+ @property
+ def closed(self) -> bool:
+ return self._closed
- self._body = body_payload
- # enable chunked encoding if needed
- if not self.chunked and hdrs.CONTENT_LENGTH not in self.headers:
- if (size := body_payload.size) is not None:
- self.headers[hdrs.CONTENT_LENGTH] = str(size)
- else:
- self.chunked = True
+ def close(self) -> None:
+ if not self._released:
+ self._notify_content()
- # copy payload headers
- assert body_payload.headers
- headers = self.headers
- skip_headers = self._skip_auto_headers
- for key, value in body_payload.headers.items():
- if key in headers or (skip_headers is not None and key in skip_headers):
- continue
- headers[key] = value
+ self._closed = True
+ if self._loop.is_closed():
+ return
- def _update_body(self, body: Any) -> None:
- """Update request body after its already been set."""
- # Remove existing Content-Length header since body is changing
- if hdrs.CONTENT_LENGTH in self.headers:
- del self.headers[hdrs.CONTENT_LENGTH]
+ self._cleanup_writer()
+ if self._connection is not None:
+ self._connection.close()
+ self._connection = None
- # Remove existing Transfer-Encoding header to avoid conflicts
- if self.chunked and hdrs.TRANSFER_ENCODING in self.headers:
- del self.headers[hdrs.TRANSFER_ENCODING]
+ def release(self) -> None:
+ if not self._released:
+ self._notify_content()
- # Now update the body using the existing method
- # Called from _update_body, add 1 to stacklevel from caller
- self.update_body_from_data(body, _stacklevel=4)
+ self._closed = True
- # Update transfer encoding headers if needed (same logic as __init__)
- if body is not None or self.method not in self.GET_METHODS:
- self.update_transfer_encoding()
+ self._cleanup_writer()
+ self._release_connection()
- async def update_body(self, body: Any) -> None:
+ @property
+ def ok(self) -> bool:
+ """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not.
+
+ This is **not** a check for ``200 OK`` but a check that the response
+ status is under 400.
"""
- Update request body and close previous payload if needed.
+ return 400 > self.status
- This method safely updates the request body by first closing any existing
- payload to prevent resource leaks, then setting the new body.
+ def raise_for_status(self) -> None:
+ if not self.ok:
+ # reason should always be not None for a started response
+ assert self.reason is not None
- IMPORTANT: Always use this method instead of setting request.body directly.
- Direct assignment to request.body will leak resources if the previous body
- contains file handles, streams, or other resources that need cleanup.
+ # If we're in a context we can rely on __aexit__() to release as the
+ # exception propagates.
+ if not self._in_context:
+ self.release()
- Args:
- body: The new body content. Can be:
- - bytes/bytearray: Raw binary data
- - str: Text data (will be encoded using charset from Content-Type)
- - FormData: Form data that will be encoded as multipart/form-data
- - Payload: A pre-configured payload object
- - AsyncIterable: An async iterable of bytes chunks
- - File-like object: Will be read and sent as binary data
- - None: Clears the body
+ raise ClientResponseError(
+ self.request_info,
+ self.history,
+ status=self.status,
+ message=self.reason,
+ headers=self.headers,
+ )
- Usage:
- # CORRECT: Use update_body
- await request.update_body(b"new request data")
+ def _release_connection(self) -> None:
+ if self._connection is not None:
+ if self.__writer is None:
+ self._connection.release()
+ self._connection = None
+ else:
+ self.__writer.add_done_callback(lambda f: self._release_connection())
- # WRONG: Don't set body directly
- # request.body = b"new request data" # This will leak resources!
+ async def _wait_released(self) -> None:
+ if self.__writer is not None:
+ try:
+ await self.__writer
+ except asyncio.CancelledError:
+ if (
+ sys.version_info >= (3, 11)
+ and (task := asyncio.current_task())
+ and task.cancelling()
+ ):
+ raise
+ self._release_connection()
- # Update with form data
- form_data = FormData()
- form_data.add_field('field', 'value')
- await request.update_body(form_data)
+ def _cleanup_writer(self) -> None:
+ if self.__writer is not None:
+ self.__writer.cancel()
+ self._session = None
- # Clear body
- await request.update_body(None)
+ def _notify_content(self) -> None:
+ content = self.content
+ # content can be None here, but the types are cheated elsewhere.
+ if content and content.exception() is None: # type: ignore[truthy-bool]
+ set_exception(content, _CONNECTION_CLOSED_EXCEPTION)
+ self._released = True
- Note:
- This method is async because it may need to close file handles or
- other resources associated with the previous payload. Always await
- this method to ensure proper cleanup.
+ async def wait_for_close(self) -> None:
+ if self.__writer is not None:
+ try:
+ await self.__writer
+ except asyncio.CancelledError:
+ if (
+ sys.version_info >= (3, 11)
+ and (task := asyncio.current_task())
+ and task.cancelling()
+ ):
+ raise
+ self.release()
- Warning:
- Setting request.body directly is highly discouraged and can lead to:
- - Resource leaks (unclosed file handles, streams)
- - Memory leaks (unreleased buffers)
- - Unexpected behavior with streaming payloads
+ async def read(self) -> bytes:
+ """Read response payload."""
+ if self._body is None:
+ try:
+ self._body = await self.content.read()
+ for trace in self._traces:
+ await trace.send_response_chunk_received(
+ self.method, self.url, self._body
+ )
+ except BaseException:
+ self.close()
+ raise
+ elif self._released: # Response explicitly released
+ raise ClientConnectionError("Connection closed")
- It is not recommended to change the payload type in middleware. If the
- body was already set (e.g., as bytes), it's best to keep the same type
- rather than converting it (e.g., to str) as this may result in unexpected
- behavior.
+ protocol = self._connection and self._connection.protocol
+ if protocol is None or not protocol.upgraded:
+ await self._wait_released() # Underlying connection released
+ return self._body
- See Also:
- - update_body_from_data: Synchronous body update without cleanup
- - body property: Direct body access (STRONGLY DISCOURAGED)
+ def get_encoding(self) -> str:
+ ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
+ mimetype = helpers.parse_mimetype(ctype)
- """
- # Close existing payload if it exists and needs closing
- if self._body is not None:
- await self._body.close()
- self._update_body(body)
+ encoding = mimetype.parameters.get("charset")
+ if encoding:
+ with contextlib.suppress(LookupError, ValueError):
+ return codecs.lookup(encoding).name
- def update_expect_continue(self, expect: bool = False) -> None:
- if expect:
- self.headers[hdrs.EXPECT] = "100-continue"
- elif (
- hdrs.EXPECT in self.headers
- and self.headers[hdrs.EXPECT].lower() == "100-continue"
+ if mimetype.type == "application" and (
+ mimetype.subtype == "json" or mimetype.subtype == "rdap"
):
- expect = True
+ # RFC 7159 states that the default encoding is UTF-8.
+ # RFC 7483 defines application/rdap+json
+ return "utf-8"
- if expect:
- self._continue = self.loop.create_future()
+ if self._body is None:
+ raise RuntimeError(
+ "Cannot compute fallback encoding of a not yet read body"
+ )
- def update_proxy(
- self,
- proxy: Optional[URL],
- proxy_auth: Optional[BasicAuth],
- proxy_headers: Optional[LooseHeaders],
- ) -> None:
- self.proxy = proxy
- if proxy is None:
- self.proxy_auth = None
- self.proxy_headers = None
- return
+ return self._resolve_charset(self, self._body)
- if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
- raise ValueError("proxy_auth must be None or BasicAuth() tuple")
- self.proxy_auth = proxy_auth
+ async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str:
+ """Read response payload and decode."""
+ await self.read()
- if proxy_headers is not None and not isinstance(
- proxy_headers, (MultiDict, MultiDictProxy)
- ):
- proxy_headers = CIMultiDict(proxy_headers)
- self.proxy_headers = proxy_headers
+ if encoding is None:
+ encoding = self.get_encoding()
- async def write_bytes(
+ return self._body.decode(encoding, errors=errors) # type: ignore[union-attr]
+
+ async def json(
self,
- writer: AbstractStreamWriter,
- conn: "Connection",
- content_length: Optional[int],
+ *,
+ encoding: Optional[str] = None,
+ loads: JSONDecoder = DEFAULT_JSON_DECODER,
+ content_type: Optional[str] = "application/json",
+ ) -> Any:
+ """Read and decodes JSON response."""
+ await self.read()
+
+ if content_type:
+ if not is_expected_content_type(self.content_type, content_type):
+ raise ContentTypeError(
+ self.request_info,
+ self.history,
+ status=self.status,
+ message=(
+ "Attempt to decode JSON with "
+ "unexpected mimetype: %s" % self.content_type
+ ),
+ headers=self.headers,
+ )
+
+ if encoding is None:
+ encoding = self.get_encoding()
+
+ return loads(self._body.decode(encoding)) # type: ignore[union-attr]
+
+ async def __aenter__(self) -> "ClientResponse":
+ self._in_context = True
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
) -> None:
- """
- Write the request body to the connection stream.
+ self._in_context = False
+ # similar to _RequestContextManager, we do not need to check
+ # for exceptions, response object can close connection
+ # if state is broken
+ self.release()
+ await self.wait_for_close()
- This method handles writing different types of request bodies:
- 1. Payload objects (using their specialized write_with_length method)
- 2. Bytes/bytearray objects
- 3. Iterable body content
- Args:
- writer: The stream writer to write the body to
- conn: The connection being used for this request
- content_length: Optional maximum number of bytes to write from the body
- (None means write the entire body)
+class ClientRequest:
+ GET_METHODS = {
+ hdrs.METH_GET,
+ hdrs.METH_HEAD,
+ hdrs.METH_OPTIONS,
+ hdrs.METH_TRACE,
+ }
+ POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
+ ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE})
- The method properly handles:
- - Waiting for 100-Continue responses if required
- - Content length constraints for chunked encoding
- - Error handling for network issues, cancellation, and other exceptions
- - Signaling EOF and timeout management
+ DEFAULT_HEADERS = {
+ hdrs.ACCEPT: "*/*",
+ hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(),
+ }
- Raises:
- ClientOSError: When there's an OS-level error writing the body
- ClientConnectionError: When there's a general connection error
- asyncio.CancelledError: When the operation is cancelled
+ # Type of body depends on PAYLOAD_REGISTRY, which is dynamic.
+ _body: Union[None, payload.Payload] = None
+ auth = None
+ response = None
- """
- # 100 response
- if self._continue is not None:
- # Force headers to be sent before waiting for 100-continue
- writer.send_headers()
- await writer.drain()
- await self._continue
+ # These class defaults help create_autospec() work correctly.
+ # If autospec is improved in future, maybe these can be removed.
+ url = URL()
+ method = "GET"
- protocol = conn.protocol
- assert protocol is not None
- try:
- # This should be a rare case but the
- # self._body can be set to None while
- # the task is being started or we wait above
- # for the 100-continue response.
- # The more likely case is we have an empty
- # payload, but 100-continue is still expected.
- if self._body is not None:
- await self._body.write_with_length(writer, content_length)
- except OSError as underlying_exc:
- reraised_exc = underlying_exc
+ __writer: Optional["asyncio.Task[None]"] = None # async task for streaming data
+ _continue = None # waiter future for '100 Continue' response
- # Distinguish between timeout and other OS errors for better error reporting
- exc_is_not_timeout = underlying_exc.errno is not None or not isinstance(
- underlying_exc, asyncio.TimeoutError
- )
- if exc_is_not_timeout:
- reraised_exc = ClientOSError(
- underlying_exc.errno,
- f"Can not write request body for {self.url !s}",
- )
+ _skip_auto_headers: Optional["CIMultiDict[None]"] = None
- set_exception(protocol, reraised_exc, underlying_exc)
- except asyncio.CancelledError:
- # Body hasn't been fully sent, so connection can't be reused
- conn.close()
- raise
- except Exception as underlying_exc:
- set_exception(
- protocol,
- ClientConnectionError(
- "Failed to send bytes into the underlying connection "
- f"{conn !s}: {underlying_exc!r}",
- ),
- underlying_exc,
+ # N.B.
+ # Adding a __del__ method that closes self._writer doesn't make sense
+ # because _writer is an instance method and thus keeps a reference to
+ # self; until the writer has finished, the finalizer will not be called.
+
+ def __init__(
+ self,
+ method: str,
+ url: URL,
+ *,
+ params: Query = None,
+ headers: Optional[LooseHeaders] = None,
+ skip_auto_headers: Optional[Iterable[str]] = None,
+ data: Any = None,
+ cookies: Optional[LooseCookies] = None,
+ auth: Optional[BasicAuth] = None,
+ version: http.HttpVersion = http.HttpVersion11,
+ compress: Union[str, bool] = False,
+ chunked: Optional[bool] = None,
+ expect100: bool = False,
+ loop: asyncio.AbstractEventLoop,
+ response_class: Optional[Type["ClientResponse"]] = None,
+ proxy: Optional[URL] = None,
+ proxy_auth: Optional[BasicAuth] = None,
+ timer: Optional[BaseTimerContext] = None,
+ session: Optional["ClientSession"] = None,
+ ssl: Union[SSLContext, bool, Fingerprint] = True,
+ proxy_headers: Optional[LooseHeaders] = None,
+ traces: Optional[List["Trace"]] = None,
+ trust_env: bool = False,
+ server_hostname: Optional[str] = None,
+ ):
+ if match := _CONTAINS_CONTROL_CHAR_RE.search(method):
+ raise ValueError(
+ f"Method cannot contain non-token characters {method!r} "
+ f"(found at least {match.group()!r})"
)
+ # URL forbids subclasses, so a simple type check is enough.
+ assert type(url) is URL, url
+ if proxy is not None:
+ assert type(proxy) is URL, proxy
+ # FIXME: session is None in tests only, need to fix tests
+ # assert session is not None
+ if TYPE_CHECKING:
+ assert session is not None
+ self._session = session
+ if params:
+ url = url.extend_query(params)
+ self.original_url = url
+ self.url = url.with_fragment(None) if url.raw_fragment else url
+ self.method = method.upper()
+ self.chunked = chunked
+ self.loop = loop
+ self.length = None
+ if response_class is None:
+ real_response_class = ClientResponse
else:
- # Successfully wrote the body, signal EOF and start response timeout
- await writer.write_eof()
- protocol.start_timeout()
+ real_response_class = response_class
+ self.response_class: Type[ClientResponse] = real_response_class
+ self._timer = timer if timer is not None else TimerNoop()
+ self._ssl = ssl
+ self.server_hostname = server_hostname
- async def send(self, conn: "Connection") -> "ClientResponse":
- # Specify request target:
- # - CONNECT request must send authority form URI
- # - not CONNECT proxy must send absolute form URI
- # - most common is origin form URI
- if self.method == hdrs.METH_CONNECT:
- connect_host = self.url.host_subcomponent
- assert connect_host is not None
- path = f"{connect_host}:{self.url.port}"
- elif self.proxy and not self.is_ssl():
- path = str(self.url)
- else:
- path = self.url.raw_path_qs
+ if loop.get_debug():
+ self._source_traceback = traceback.extract_stack(sys._getframe(1))
- protocol = conn.protocol
- assert protocol is not None
- writer = StreamWriter(
- protocol,
- self.loop,
- on_chunk_sent=(
- functools.partial(self._on_chunk_request_sent, self.method, self.url)
- if self._traces
- else None
- ),
- on_headers_sent=(
- functools.partial(self._on_headers_request_sent, self.method, self.url)
- if self._traces
- else None
+ self.update_version(version)
+ self.update_host(url)
+ self.update_headers(headers)
+ self.update_auto_headers(skip_auto_headers)
+ self.update_cookies(cookies)
+ self.update_content_encoding(data, compress)
+ self.update_auth(auth, trust_env)
+ self.update_proxy(proxy, proxy_auth, proxy_headers)
+
+ self.update_body_from_data(data)
+ if data is not None or self.method not in self.GET_METHODS:
+ self.update_transfer_encoding()
+ self.update_expect_continue(expect100)
+ self._traces = [] if traces is None else traces
+
+ def __reset_writer(self, _: object = None) -> None:
+ self.__writer = None
+
+ def _get_content_length(self) -> Optional[int]:
+ """Extract and validate Content-Length header value.
+
+ Returns parsed Content-Length value or None if not set.
+ Raises ValueError if header exists but cannot be parsed as an integer.
+ """
+ if hdrs.CONTENT_LENGTH not in self.headers:
+ return None
+
+ content_length_hdr = self.headers[hdrs.CONTENT_LENGTH]
+ try:
+ return int(content_length_hdr)
+ except ValueError:
+ raise ValueError(
+ f"Invalid Content-Length header: {content_length_hdr}"
+ ) from None
+
+ @property
+ def skip_auto_headers(self) -> CIMultiDict[None]:
+ return self._skip_auto_headers or CIMultiDict()
+
+ @property
+ def _writer(self) -> Optional["asyncio.Task[None]"]:
+ return self.__writer
+
+ @_writer.setter
+ def _writer(self, writer: "asyncio.Task[None]") -> None:
+ if self.__writer is not None:
+ self.__writer.remove_done_callback(self.__reset_writer)
+ self.__writer = writer
+ writer.add_done_callback(self.__reset_writer)
+
+ def is_ssl(self) -> bool:
+ return self.url.scheme in _SSL_SCHEMES
+
+ @property
+ def ssl(self) -> Union["SSLContext", bool, Fingerprint]:
+ return self._ssl
+
+ @property
+ def connection_key(self) -> ConnectionKey: # type: ignore[misc]
+ if proxy_headers := self.proxy_headers:
+ h: Optional[int] = hash(tuple(proxy_headers.items()))
+ else:
+ h = None
+ url = self.url
+ return tuple.__new__(
+ ConnectionKey,
+ (
+ url.raw_host or "",
+ url.port,
+ url.scheme in _SSL_SCHEMES,
+ self._ssl,
+ self.proxy,
+ self.proxy_auth,
+ h,
),
)
- if self.compress:
- writer.enable_compression(self.compress)
+ @property
+ def host(self) -> str:
+ ret = self.url.raw_host
+ assert ret is not None
+ return ret
- if self.chunked is not None:
- writer.enable_chunking()
+ @property
+ def port(self) -> Optional[int]:
+ return self.url.port
- # set default content-type
- if (
- self.method in self.POST_METHODS
- and (
- self._skip_auto_headers is None
- or hdrs.CONTENT_TYPE not in self._skip_auto_headers
- )
- and hdrs.CONTENT_TYPE not in self.headers
- ):
- self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream"
+ @property
+ def body(self) -> Union[payload.Payload, Literal[b""]]:
+ """Request body."""
+ # empty body is represented as bytes for backwards compatibility
+ return self._body or b""
- v = self.version
- if hdrs.CONNECTION not in self.headers:
- if conn._connector.force_close:
- if v == HttpVersion11:
- self.headers[hdrs.CONNECTION] = "close"
- elif v == HttpVersion10:
- self.headers[hdrs.CONNECTION] = "keep-alive"
+ @body.setter
+ def body(self, value: Any) -> None:
+ """Set request body with warning for non-autoclose payloads.
- # status + headers
- status_line = f"{self.method} {path} HTTP/{v.major}.{v.minor}"
+ WARNING: This setter must be called from within an event loop and is not
+ thread-safe. Setting body outside of an event loop may raise RuntimeError
+ when closing file-based payloads.
- # Buffer headers for potential coalescing with body
- await writer.write_headers(status_line, self.headers)
+ DEPRECATED: Direct assignment to body is deprecated and will be removed
+ in a future version. Use await update_body() instead for proper resource
+ management.
+ """
+ # Close existing payload if present
+ if self._body is not None:
+ # Warn if the payload needs manual closing
+ # stacklevel=3: user code -> body setter -> _warn_if_unclosed_payload
+ _warn_if_unclosed_payload(self._body, stacklevel=3)
+ # NOTE: In the future, when we remove sync close support,
+ # this setter will need to be removed and only the async
+ # update_body() method will be available. For now, we call
+ # _close() for backwards compatibility.
+ self._body._close()
+ self._update_body(value)
- task: Optional["asyncio.Task[None]"]
- if self._body or self._continue is not None or protocol.writing_paused:
- coro = self.write_bytes(writer, conn, self._get_content_length())
- if sys.version_info >= (3, 12):
- # Optimization for Python 3.12, try to write
- # bytes immediately to avoid having to schedule
- # the task on the event loop.
- task = asyncio.Task(coro, loop=self.loop, eager_start=True)
- else:
- task = self.loop.create_task(coro)
- if task.done():
- task = None
- else:
- self._writer = task
- else:
- # We have nothing to write because
- # - there is no body
- # - the protocol does not have writing paused
- # - we are not waiting for a 100-continue response
- protocol.start_timeout()
- writer.set_eof()
- task = None
- response_class = self.response_class
- assert response_class is not None
- self.response = response_class(
- self.method,
- self.original_url,
- writer=task,
- continue100=self._continue,
- timer=self._timer,
- request_info=self.request_info,
- traces=self._traces,
- loop=self.loop,
- session=self._session,
+ @property
+ def request_info(self) -> RequestInfo:
+ headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers)
+ # These are created on every request, so we use a NamedTuple
+ # for performance reasons. We don't use the RequestInfo.__new__
+ # method because it has a different signature which is provided
+ # for backwards compatibility only.
+ return tuple.__new__(
+ RequestInfo, (self.url, self.method, headers, self.original_url)
)
- return self.response
- async def close(self) -> None:
- if self.__writer is not None:
- try:
- await self.__writer
- except asyncio.CancelledError:
- if (
- sys.version_info >= (3, 11)
- and (task := asyncio.current_task())
- and task.cancelling()
- ):
- raise
+ @property
+ def session(self) -> "ClientSession":
+ """Return the ClientSession instance.
- def terminate(self) -> None:
- if self.__writer is not None:
- if not self.loop.is_closed():
- self.__writer.cancel()
- self.__writer.remove_done_callback(self.__reset_writer)
- self.__writer = None
+ This property provides access to the ClientSession that initiated
+ this request, allowing middleware to make additional requests
+ using the same session.
+ """
+ return self._session
- async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None:
- for trace in self._traces:
- await trace.send_request_chunk_sent(method, url, chunk)
+ def update_host(self, url: URL) -> None:
+ """Update destination host, port and connection type (ssl)."""
+ # get host/port
+ if not url.raw_host:
+ raise InvalidURL(url)
- async def _on_headers_request_sent(
- self, method: str, url: URL, headers: "CIMultiDict[str]"
- ) -> None:
- for trace in self._traces:
- await trace.send_request_headers(method, url, headers)
+ # basic auth info
+ if url.raw_user or url.raw_password:
+ self.auth = helpers.BasicAuth(url.user or "", url.password or "")
+ def update_version(self, version: Union[http.HttpVersion, str]) -> None:
+ """Convert request version to two elements tuple.
-_CONNECTION_CLOSED_EXCEPTION = ClientConnectionError("Connection closed")
+ parse HTTP version '1.1' => (1, 1)
+ """
+ if isinstance(version, str):
+ v = [part.strip() for part in version.split(".", 1)]
+ try:
+ version = http.HttpVersion(int(v[0]), int(v[1]))
+ except ValueError:
+ raise ValueError(
+ f"Can not parse http version number: {version}"
+ ) from None
+ self.version = version
+ def update_headers(self, headers: Optional[LooseHeaders]) -> None:
+ """Update request headers."""
+ self.headers: CIMultiDict[str] = CIMultiDict()
-class ClientResponse(HeadersMixin):
- # Some of these attributes are None when created,
- # but will be set by the start() method.
- # As the end user will likely never see the None values, we cheat the types below.
- # from the Status-Line of the response
- version: Optional[HttpVersion] = None # HTTP-Version
- status: int = None # type: ignore[assignment] # Status-Code
- reason: Optional[str] = None # Reason-Phrase
+ # Build the host header
+ host = self.url.host_port_subcomponent
- content: StreamReader = None # type: ignore[assignment] # Payload stream
- _body: Optional[bytes] = None
- _headers: CIMultiDictProxy[str] = None # type: ignore[assignment]
- _history: Tuple["ClientResponse", ...] = ()
- _raw_headers: RawHeaders = None # type: ignore[assignment]
+ # host_port_subcomponent is None when the URL is a relative URL.
+ # but we know we do not have a relative URL here.
+ assert host is not None
+ self.headers[hdrs.HOST] = host
- _connection: Optional["Connection"] = None # current connection
- _cookies: Optional[SimpleCookie] = None
- _continue: Optional["asyncio.Future[bool]"] = None
- _source_traceback: Optional[traceback.StackSummary] = None
- _session: Optional["ClientSession"] = None
- # set up by ClientRequest after ClientResponse object creation
- # post-init stage avoids changing the ctor signature
- _closed = True # to allow __del__ for non-initialized properly response
- _released = False
- _in_context = False
+ if not headers:
+ return
+
+ if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
+ headers = headers.items()
- _resolve_charset: Callable[["ClientResponse", bytes], str] = lambda *_: "utf-8"
+ for key, value in headers: # type: ignore[misc]
+ # A special case for Host header
+ if key in hdrs.HOST_ALL:
+ self.headers[key] = value
+ else:
+ self.headers.add(key, value)
- __writer: Optional["asyncio.Task[None]"] = None
+ def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None:
+ if skip_auto_headers is not None:
+ self._skip_auto_headers = CIMultiDict(
+ (hdr, None) for hdr in sorted(skip_auto_headers)
+ )
+ used_headers = self.headers.copy()
+ used_headers.extend(self._skip_auto_headers) # type: ignore[arg-type]
+ else:
+ # Fast path when there are no headers to skip
+ # which is the most common case.
+ used_headers = self.headers
- def __init__(
- self,
- method: str,
- url: URL,
- *,
- writer: "Optional[asyncio.Task[None]]",
- continue100: Optional["asyncio.Future[bool]"],
- timer: Optional[BaseTimerContext],
- request_info: RequestInfo,
- traces: List["Trace"],
- loop: asyncio.AbstractEventLoop,
- session: "ClientSession",
- ) -> None:
- # URL forbids subclasses, so a simple type check is enough.
- assert type(url) is URL
+ for hdr, val in self.DEFAULT_HEADERS.items():
+ if hdr not in used_headers:
+ self.headers[hdr] = val
- self.method = method
+ if hdrs.USER_AGENT not in used_headers:
+ self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE
- self._real_url = url
- self._url = url.with_fragment(None) if url.raw_fragment else url
- if writer is not None:
- self._writer = writer
- if continue100 is not None:
- self._continue = continue100
- self._request_info = request_info
- self._timer = timer if timer is not None else TimerNoop()
- self._cache: Dict[str, Any] = {}
- self._traces = traces
- self._loop = loop
- # Save reference to _resolve_charset, so that get_encoding() will still
- # work after the response has finished reading the body.
- # TODO: Fix session=None in tests (see ClientRequest.__init__).
- if session is not None:
- # store a reference to session #1985
- self._session = session
- self._resolve_charset = session._resolve_charset
- if loop.get_debug():
- self._source_traceback = traceback.extract_stack(sys._getframe(1))
+ def update_cookies(self, cookies: Optional[LooseCookies]) -> None:
+ """Update request cookies header."""
+ if not cookies:
+ return
- def __reset_writer(self, _: object = None) -> None:
- self.__writer = None
+ c = SimpleCookie()
+ if hdrs.COOKIE in self.headers:
+ c.load(self.headers.get(hdrs.COOKIE, ""))
+ del self.headers[hdrs.COOKIE]
- @property
- def _writer(self) -> Optional["asyncio.Task[None]"]:
- """The writer task for streaming data.
+ if isinstance(cookies, Mapping):
+ iter_cookies = cookies.items()
+ else:
+ iter_cookies = cookies # type: ignore[assignment]
+ for name, value in iter_cookies:
+ if isinstance(value, Morsel):
+ # Preserve coded_value
+ mrsl_val = value.get(value.key, Morsel())
+ mrsl_val.set(value.key, value.value, value.coded_value)
+ c[name] = mrsl_val
+ else:
+ c[name] = value # type: ignore[assignment]
- _writer is only provided for backwards compatibility
- for subclasses that may need to access it.
- """
- return self.__writer
+ self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip()
- @_writer.setter
- def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None:
- """Set the writer task for streaming data."""
- if self.__writer is not None:
- self.__writer.remove_done_callback(self.__reset_writer)
- self.__writer = writer
- if writer is None:
+ def update_content_encoding(self, data: Any, compress: Union[bool, str]) -> None:
+ """Set request content encoding."""
+ self.compress = None
+ if not data:
return
- if writer.done():
- # The writer is already done, so we can clear it immediately.
- self.__writer = None
- else:
- writer.add_done_callback(self.__reset_writer)
- @property
- def cookies(self) -> SimpleCookie:
- if self._cookies is None:
- self._cookies = SimpleCookie()
- return self._cookies
+ if self.headers.get(hdrs.CONTENT_ENCODING):
+ if compress:
+ raise ValueError(
+ "compress can not be set if Content-Encoding header is set"
+ )
+ elif compress:
+ self.compress = compress if isinstance(compress, str) else "deflate"
+ self.headers[hdrs.CONTENT_ENCODING] = self.compress
+ self.chunked = True # enable chunked, no need to deal with length
- @cookies.setter
- def cookies(self, cookies: SimpleCookie) -> None:
- self._cookies = cookies
+ def update_transfer_encoding(self) -> None:
+ """Analyze transfer-encoding header."""
+ te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower()
- @reify
- def url(self) -> URL:
- return self._url
+ if "chunked" in te:
+ if self.chunked:
+ raise ValueError(
+ "chunked can not be set "
+ 'if "Transfer-Encoding: chunked" header is set'
+ )
- @reify
- def real_url(self) -> URL:
- return self._real_url
+ elif self.chunked:
+ if hdrs.CONTENT_LENGTH in self.headers:
+ raise ValueError(
+ "chunked can not be set if Content-Length header is set"
+ )
- @reify
- def host(self) -> str:
- assert self._url.host is not None
- return self._url.host
+ self.headers[hdrs.TRANSFER_ENCODING] = "chunked"
+ elif (
+ self._body is not None
+ and hdrs.CONTENT_LENGTH not in self.headers
+ and (size := self._body.size) is not None
+ ):
+ self.headers[hdrs.CONTENT_LENGTH] = str(size)
- @reify
- def headers(self) -> "CIMultiDictProxy[str]":
- return self._headers
+ def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None:
+ """Set basic auth."""
+ if auth is None:
+ auth = self.auth
+ if auth is None and trust_env and self.url.host is not None:
+ netrc_obj = netrc_from_env()
+ with contextlib.suppress(LookupError):
+ auth = basicauth_from_netrc(netrc_obj, self.url.host)
+ if auth is None:
+ return
- @reify
- def raw_headers(self) -> RawHeaders:
- return self._raw_headers
+ if not isinstance(auth, helpers.BasicAuth):
+ raise TypeError("BasicAuth() tuple is required instead")
- @reify
- def request_info(self) -> RequestInfo:
- return self._request_info
+ self.headers[hdrs.AUTHORIZATION] = auth.encode()
- @reify
- def content_disposition(self) -> Optional[ContentDisposition]:
- raw = self._headers.get(hdrs.CONTENT_DISPOSITION)
- if raw is None:
- return None
- disposition_type, params_dct = multipart.parse_content_disposition(raw)
- params = MappingProxyType(params_dct)
- filename = multipart.content_disposition_filename(params)
- return ContentDisposition(disposition_type, params, filename)
+ def update_body_from_data(self, body: Any, _stacklevel: int = 3) -> None:
+ """Update request body from data."""
+ if self._body is not None:
+ _warn_if_unclosed_payload(self._body, stacklevel=_stacklevel)
- def __del__(self, _warnings: Any = warnings) -> None:
- if self._closed:
+ if body is None:
+ self._body = None
return
- if self._connection is not None:
- self._connection.release()
- self._cleanup_writer()
+ # FormData
+ maybe_payload = body() if isinstance(body, FormData) else body
- if self._loop.get_debug():
- _warnings.warn(
- f"Unclosed response {self!r}", ResourceWarning, source=self
+ try:
+ body_payload = payload.PAYLOAD_REGISTRY.get(maybe_payload, disposition=None)
+ except payload.LookupError:
+ boundary: Optional[str] = None
+ if CONTENT_TYPE in self.headers:
+ boundary = parse_mimetype(self.headers[CONTENT_TYPE]).parameters.get(
+ "boundary"
)
- context = {"client_response": self, "message": "Unclosed response"}
- if self._source_traceback:
- context["source_traceback"] = self._source_traceback
- self._loop.call_exception_handler(context)
+ body_payload = FormData(maybe_payload, boundary=boundary)() # type: ignore[arg-type]
- def __repr__(self) -> str:
- out = io.StringIO()
- ascii_encodable_url = str(self.url)
- if self.reason:
- ascii_encodable_reason = self.reason.encode(
- "ascii", "backslashreplace"
- ).decode("ascii")
- else:
- ascii_encodable_reason = "None"
- print(
- "".format(
- ascii_encodable_url, self.status, ascii_encodable_reason
- ),
- file=out,
- )
- print(self.headers, file=out)
- return out.getvalue()
+ self._body = body_payload
+ # enable chunked encoding if needed
+ if not self.chunked and hdrs.CONTENT_LENGTH not in self.headers:
+ if (size := body_payload.size) is not None:
+ self.headers[hdrs.CONTENT_LENGTH] = str(size)
+ else:
+ self.chunked = True
+
+ # copy payload headers
+ assert body_payload.headers
+ headers = self.headers
+ skip_headers = self._skip_auto_headers
+ for key, value in body_payload.headers.items():
+ if key in headers or (skip_headers is not None and key in skip_headers):
+ continue
+ headers[key] = value
+
+ def _update_body(self, body: Any) -> None:
+ """Update request body after its already been set."""
+ # Remove existing Content-Length header since body is changing
+ if hdrs.CONTENT_LENGTH in self.headers:
+ del self.headers[hdrs.CONTENT_LENGTH]
+
+ # Remove existing Transfer-Encoding header to avoid conflicts
+ if self.chunked and hdrs.TRANSFER_ENCODING in self.headers:
+ del self.headers[hdrs.TRANSFER_ENCODING]
- @property
- def connection(self) -> Optional["Connection"]:
- return self._connection
+ # Now update the body using the existing method
+ # Called from _update_body, add 1 to stacklevel from caller
+ self.update_body_from_data(body, _stacklevel=4)
- @reify
- def history(self) -> Tuple["ClientResponse", ...]:
- """A sequence of responses, if redirects occurred."""
- return self._history
+ # Update transfer encoding headers if needed (same logic as __init__)
+ if body is not None or self.method not in self.GET_METHODS:
+ self.update_transfer_encoding()
- @reify
- def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":
- links_str = ", ".join(self.headers.getall("link", []))
+ async def update_body(self, body: Any) -> None:
+ """
+ Update request body and close previous payload if needed.
- if not links_str:
- return MultiDictProxy(MultiDict())
+ This method safely updates the request body by first closing any existing
+ payload to prevent resource leaks, then setting the new body.
- links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict()
+ IMPORTANT: Always use this method instead of setting request.body directly.
+ Direct assignment to request.body will leak resources if the previous body
+ contains file handles, streams, or other resources that need cleanup.
- for val in re.split(r",(?=\s*<)", links_str):
- match = re.match(r"\s*<(.*)>(.*)", val)
- if match is None: # Malformed link
- continue
- url, params_str = match.groups()
- params = params_str.split(";")[1:]
+ Args:
+ body: The new body content. Can be:
+ - bytes/bytearray: Raw binary data
+ - str: Text data (will be encoded using charset from Content-Type)
+ - FormData: Form data that will be encoded as multipart/form-data
+ - Payload: A pre-configured payload object
+ - AsyncIterable: An async iterable of bytes chunks
+ - File-like object: Will be read and sent as binary data
+ - None: Clears the body
- link: MultiDict[Union[str, URL]] = MultiDict()
+ Usage:
+ # CORRECT: Use update_body
+ await request.update_body(b"new request data")
- for param in params:
- match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
- if match is None: # Malformed param
- continue
- key, _, value, _ = match.groups()
+ # WRONG: Don't set body directly
+ # request.body = b"new request data" # This will leak resources!
- link.add(key, value)
+ # Update with form data
+ form_data = FormData()
+ form_data.add_field('field', 'value')
+ await request.update_body(form_data)
- key = link.get("rel", url)
+ # Clear body
+ await request.update_body(None)
- link.add("url", self.url.join(URL(url)))
+ Note:
+ This method is async because it may need to close file handles or
+ other resources associated with the previous payload. Always await
+ this method to ensure proper cleanup.
- links.add(str(key), MultiDictProxy(link))
+ Warning:
+ Setting request.body directly is highly discouraged and can lead to:
+ - Resource leaks (unclosed file handles, streams)
+ - Memory leaks (unreleased buffers)
+ - Unexpected behavior with streaming payloads
- return MultiDictProxy(links)
+ It is not recommended to change the payload type in middleware. If the
+ body was already set (e.g., as bytes), it's best to keep the same type
+ rather than converting it (e.g., to str) as this may result in unexpected
+ behavior.
- async def start(self, connection: "Connection") -> "ClientResponse":
- """Start response processing."""
- self._closed = False
- self._protocol = connection.protocol
- self._connection = connection
+ See Also:
+ - update_body_from_data: Synchronous body update without cleanup
+ - body property: Direct body access (STRONGLY DISCOURAGED)
- with self._timer:
- while True:
- # read response
- try:
- protocol = self._protocol
- message, payload = await protocol.read() # type: ignore[union-attr]
- except http.HttpProcessingError as exc:
- raise ClientResponseError(
- self.request_info,
- self.history,
- status=exc.code,
- message=exc.message,
- headers=exc.headers,
- ) from exc
+ """
+ # Close existing payload if it exists and needs closing
+ if self._body is not None:
+ await self._body.close()
+ self._update_body(body)
- if message.code < 100 or message.code > 199 or message.code == 101:
- break
+ def update_expect_continue(self, expect: bool = False) -> None:
+ if expect:
+ self.headers[hdrs.EXPECT] = "100-continue"
+ elif (
+ hdrs.EXPECT in self.headers
+ and self.headers[hdrs.EXPECT].lower() == "100-continue"
+ ):
+ expect = True
- if self._continue is not None:
- set_result(self._continue, True)
- self._continue = None
+ if expect:
+ self._continue = self.loop.create_future()
- # payload eof handler
- payload.on_eof(self._response_eof)
+ def update_proxy(
+ self,
+ proxy: Optional[URL],
+ proxy_auth: Optional[BasicAuth],
+ proxy_headers: Optional[LooseHeaders],
+ ) -> None:
+ self.proxy = proxy
+ if proxy is None:
+ self.proxy_auth = None
+ self.proxy_headers = None
+ return
- # response status
- self.version = message.version
- self.status = message.code
- self.reason = message.reason
+ if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
+ raise ValueError("proxy_auth must be None or BasicAuth() tuple")
+ self.proxy_auth = proxy_auth
- # headers
- self._headers = message.headers # type is CIMultiDictProxy
- self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes]
+ if proxy_headers is not None and not isinstance(
+ proxy_headers, (MultiDict, MultiDictProxy)
+ ):
+ proxy_headers = CIMultiDict(proxy_headers)
+ self.proxy_headers = proxy_headers
- # payload
- self.content = payload
+ async def write_bytes(
+ self,
+ writer: AbstractStreamWriter,
+ conn: "Connection",
+ content_length: Optional[int],
+ ) -> None:
+ """
+ Write the request body to the connection stream.
- # cookies
- if cookie_hdrs := self.headers.getall(hdrs.SET_COOKIE, ()):
- cookies = SimpleCookie()
- for hdr in cookie_hdrs:
- try:
- cookies.load(hdr)
- except CookieError as exc:
- client_logger.warning("Can not load response cookies: %s", exc)
- self._cookies = cookies
- return self
+ This method handles writing different types of request bodies:
+ 1. Payload objects (using their specialized write_with_length method)
+ 2. Bytes/bytearray objects
+ 3. Iterable body content
- def _response_eof(self) -> None:
- if self._closed:
- return
+ Args:
+ writer: The stream writer to write the body to
+ conn: The connection being used for this request
+ content_length: Optional maximum number of bytes to write from the body
+ (None means write the entire body)
- # protocol could be None because connection could be detached
- protocol = self._connection and self._connection.protocol
- if protocol is not None and protocol.upgraded:
- return
+ The method properly handles:
+ - Waiting for 100-Continue responses if required
+ - Content length constraints for chunked encoding
+ - Error handling for network issues, cancellation, and other exceptions
+ - Signaling EOF and timeout management
- self._closed = True
- self._cleanup_writer()
- self._release_connection()
+ Raises:
+ ClientOSError: When there's an OS-level error writing the body
+ ClientConnectionError: When there's a general connection error
+ asyncio.CancelledError: When the operation is cancelled
- @property
- def closed(self) -> bool:
- return self._closed
+ """
+ # 100 response
+ if self._continue is not None:
+ # Force headers to be sent before waiting for 100-continue
+ writer.send_headers()
+ await writer.drain()
+ await self._continue
- def close(self) -> None:
- if not self._released:
- self._notify_content()
+ protocol = conn.protocol
+ assert protocol is not None
+ try:
+ # This should be a rare case but the
+ # self._body can be set to None while
+ # the task is being started or we wait above
+ # for the 100-continue response.
+ # The more likely case is we have an empty
+ # payload, but 100-continue is still expected.
+ if self._body is not None:
+ await self._body.write_with_length(writer, content_length)
+ except OSError as underlying_exc:
+ reraised_exc = underlying_exc
- self._closed = True
- if self._loop.is_closed():
- return
+ # Distinguish between timeout and other OS errors for better error reporting
+ exc_is_not_timeout = underlying_exc.errno is not None or not isinstance(
+ underlying_exc, asyncio.TimeoutError
+ )
+ if exc_is_not_timeout:
+ reraised_exc = ClientOSError(
+ underlying_exc.errno,
+ f"Can not write request body for {self.url !s}",
+ )
- self._cleanup_writer()
- if self._connection is not None:
- self._connection.close()
- self._connection = None
+ set_exception(protocol, reraised_exc, underlying_exc)
+ except asyncio.CancelledError:
+ # Body hasn't been fully sent, so connection can't be reused
+ conn.close()
+ raise
+ except Exception as underlying_exc:
+ set_exception(
+ protocol,
+ ClientConnectionError(
+ "Failed to send bytes into the underlying connection "
+ f"{conn !s}: {underlying_exc!r}",
+ ),
+ underlying_exc,
+ )
+ else:
+ # Successfully wrote the body, signal EOF and start response timeout
+ await writer.write_eof()
+ protocol.start_timeout()
- def release(self) -> None:
- if not self._released:
- self._notify_content()
+ async def send(self, conn: "Connection") -> "ClientResponse":
+ # Specify request target:
+ # - CONNECT request must send authority form URI
+ # - not CONNECT proxy must send absolute form URI
+ # - most common is origin form URI
+ if self.method == hdrs.METH_CONNECT:
+ connect_host = self.url.host_subcomponent
+ assert connect_host is not None
+ path = f"{connect_host}:{self.url.port}"
+ elif self.proxy and not self.is_ssl():
+ path = str(self.url)
+ else:
+ path = self.url.raw_path_qs
- self._closed = True
+ protocol = conn.protocol
+ assert protocol is not None
+ writer = StreamWriter(
+ protocol,
+ self.loop,
+ on_chunk_sent=(
+ functools.partial(self._on_chunk_request_sent, self.method, self.url)
+ if self._traces
+ else None
+ ),
+ on_headers_sent=(
+ functools.partial(self._on_headers_request_sent, self.method, self.url)
+ if self._traces
+ else None
+ ),
+ )
- self._cleanup_writer()
- self._release_connection()
+ if self.compress:
+ writer.enable_compression(self.compress)
- @property
- def ok(self) -> bool:
- """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not.
+ if self.chunked is not None:
+ writer.enable_chunking()
- This is **not** a check for ``200 OK`` but a check that the response
- status is under 400.
- """
- return 400 > self.status
+ # set default content-type
+ if (
+ self.method in self.POST_METHODS
+ and (
+ self._skip_auto_headers is None
+ or hdrs.CONTENT_TYPE not in self._skip_auto_headers
+ )
+ and hdrs.CONTENT_TYPE not in self.headers
+ ):
+ self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream"
- def raise_for_status(self) -> None:
- if not self.ok:
- # reason should always be not None for a started response
- assert self.reason is not None
+ v = self.version
+ if hdrs.CONNECTION not in self.headers:
+ if conn._connector.force_close:
+ if v == HttpVersion11:
+ self.headers[hdrs.CONNECTION] = "close"
+ elif v == HttpVersion10:
+ self.headers[hdrs.CONNECTION] = "keep-alive"
- # If we're in a context we can rely on __aexit__() to release as the
- # exception propagates.
- if not self._in_context:
- self.release()
+ # status + headers
+ status_line = f"{self.method} {path} HTTP/{v.major}.{v.minor}"
- raise ClientResponseError(
- self.request_info,
- self.history,
- status=self.status,
- message=self.reason,
- headers=self.headers,
- )
+ # Buffer headers for potential coalescing with body
+ await writer.write_headers(status_line, self.headers)
- def _release_connection(self) -> None:
- if self._connection is not None:
- if self.__writer is None:
- self._connection.release()
- self._connection = None
+ task: Optional["asyncio.Task[None]"]
+ if self._body or self._continue is not None or protocol.writing_paused:
+ coro = self.write_bytes(writer, conn, self._get_content_length())
+ if sys.version_info >= (3, 12):
+ # Optimization for Python 3.12, try to write
+ # bytes immediately to avoid having to schedule
+ # the task on the event loop.
+ task = asyncio.Task(coro, loop=self.loop, eager_start=True)
else:
- self.__writer.add_done_callback(lambda f: self._release_connection())
+ task = self.loop.create_task(coro)
+ if task.done():
+ task = None
+ else:
+ self._writer = task
+ else:
+ # We have nothing to write because
+ # - there is no body
+ # - the protocol does not have writing paused
+ # - we are not waiting for a 100-continue response
+ protocol.start_timeout()
+ writer.set_eof()
+ task = None
+ response_class = self.response_class
+ assert response_class is not None
+ self.response = response_class(
+ self.method,
+ self.original_url,
+ writer=task,
+ continue100=self._continue,
+ timer=self._timer,
+ request_info=self.request_info,
+ traces=self._traces,
+ loop=self.loop,
+ session=self._session,
+ )
+ return self.response
- async def _wait_released(self) -> None:
+ async def close(self) -> None:
if self.__writer is not None:
try:
await self.__writer
@@ -1319,126 +1423,20 @@ async def _wait_released(self) -> None:
and task.cancelling()
):
raise
- self._release_connection()
-
- def _cleanup_writer(self) -> None:
- if self.__writer is not None:
- self.__writer.cancel()
- self._session = None
-
- def _notify_content(self) -> None:
- content = self.content
- # content can be None here, but the types are cheated elsewhere.
- if content and content.exception() is None: # type: ignore[truthy-bool]
- set_exception(content, _CONNECTION_CLOSED_EXCEPTION)
- self._released = True
- async def wait_for_close(self) -> None:
+ def terminate(self) -> None:
if self.__writer is not None:
- try:
- await self.__writer
- except asyncio.CancelledError:
- if (
- sys.version_info >= (3, 11)
- and (task := asyncio.current_task())
- and task.cancelling()
- ):
- raise
- self.release()
-
- async def read(self) -> bytes:
- """Read response payload."""
- if self._body is None:
- try:
- self._body = await self.content.read()
- for trace in self._traces:
- await trace.send_response_chunk_received(
- self.method, self.url, self._body
- )
- except BaseException:
- self.close()
- raise
- elif self._released: # Response explicitly released
- raise ClientConnectionError("Connection closed")
-
- protocol = self._connection and self._connection.protocol
- if protocol is None or not protocol.upgraded:
- await self._wait_released() # Underlying connection released
- return self._body
-
- def get_encoding(self) -> str:
- ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
- mimetype = helpers.parse_mimetype(ctype)
-
- encoding = mimetype.parameters.get("charset")
- if encoding:
- with contextlib.suppress(LookupError, ValueError):
- return codecs.lookup(encoding).name
-
- if mimetype.type == "application" and (
- mimetype.subtype == "json" or mimetype.subtype == "rdap"
- ):
- # RFC 7159 states that the default encoding is UTF-8.
- # RFC 7483 defines application/rdap+json
- return "utf-8"
-
- if self._body is None:
- raise RuntimeError(
- "Cannot compute fallback encoding of a not yet read body"
- )
-
- return self._resolve_charset(self, self._body)
-
- async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str:
- """Read response payload and decode."""
- await self.read()
-
- if encoding is None:
- encoding = self.get_encoding()
-
- return self._body.decode(encoding, errors=errors) # type: ignore[union-attr]
-
- async def json(
- self,
- *,
- encoding: Optional[str] = None,
- loads: JSONDecoder = DEFAULT_JSON_DECODER,
- content_type: Optional[str] = "application/json",
- ) -> Any:
- """Read and decodes JSON response."""
- await self.read()
-
- if content_type:
- if not is_expected_content_type(self.content_type, content_type):
- raise ContentTypeError(
- self.request_info,
- self.history,
- status=self.status,
- message=(
- "Attempt to decode JSON with "
- "unexpected mimetype: %s" % self.content_type
- ),
- headers=self.headers,
- )
-
- if encoding is None:
- encoding = self.get_encoding()
-
- return loads(self._body.decode(encoding)) # type: ignore[union-attr]
+ if not self.loop.is_closed():
+ self.__writer.cancel()
+ self.__writer.remove_done_callback(self.__reset_writer)
+ self.__writer = None
- async def __aenter__(self) -> "ClientResponse":
- self._in_context = True
- return self
+ async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None:
+ for trace in self._traces:
+ await trace.send_request_chunk_sent(method, url, chunk)
- async def __aexit__(
- self,
- exc_type: Optional[Type[BaseException]],
- exc_val: Optional[BaseException],
- exc_tb: Optional[TracebackType],
+ async def _on_headers_request_sent(
+ self, method: str, url: URL, headers: "CIMultiDict[str]"
) -> None:
- self._in_context = False
- # similar to _RequestContextManager, we do not need to check
- # for exceptions, response object can close connection
- # if state is broken
- self.release()
- await self.wait_for_close()
+ for trace in self._traces:
+ await trace.send_request_headers(method, url, headers)
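The reusable-body work above centers on ClientRequest.update_body(): middleware and retry logic can now replace a request body without leaking the previous payload's resources. A minimal sketch of a client middleware using it follows; it assumes the 3.12 client-middleware interface (ClientSession(middlewares=...), each middleware called as middleware(request, handler)), and the retry_once name, URL, and payload values are illustrative only:

    import asyncio

    from aiohttp import ClientSession, FormData

    async def retry_once(request, handler):
        # First attempt; reusable payloads mean the body survives this send.
        response = await handler(request)
        if response.status == 503:
            # Replace the body before retrying. update_body() closes the
            # previous payload; direct assignment to request.body would
            # leak file handles or stream buffers.
            await request.update_body(b"retry payload")
            response = await handler(request)
        return response

    async def main() -> None:
        form = FormData()
        form.add_field("field", "value")
        async with ClientSession(middlewares=(retry_once,)) as session:
            async with session.post("http://example.com", data=form) as resp:
                print(resp.status)

    asyncio.run(main())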
From f45de4222f5724e88d06190d69dd46ae74c5b50a Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Mon, 26 May 2025 13:49:19 +0000
Subject: [PATCH 08/13] [PR #11013/3f7a2e98 backport][3.12] Readjust baseline
for ClientRequest benchmarks (#11015)
Co-authored-by: Sam Bull
Co-authored-by: J. Nick Koston
---
tests/test_benchmarks_client_request.py | 73 +++++++++++++++++++++----
1 file changed, 62 insertions(+), 11 deletions(-)
diff --git a/tests/test_benchmarks_client_request.py b/tests/test_benchmarks_client_request.py
index 65667995185..34ae3629f9e 100644
--- a/tests/test_benchmarks_client_request.py
+++ b/tests/test_benchmarks_client_request.py
@@ -1,27 +1,38 @@
"""codspeed benchmarks for client requests."""
import asyncio
-from http.cookies import Morsel
+from http.cookies import BaseCookie
from typing import Union
+from multidict import CIMultiDict
from pytest_codspeed import BenchmarkFixture
from yarl import URL
-from aiohttp.client_reqrep import ClientRequest
+from aiohttp.client_reqrep import ClientRequest, ClientResponse
+from aiohttp.cookiejar import CookieJar
+from aiohttp.helpers import TimerNoop
from aiohttp.http_writer import HttpVersion11
+from aiohttp.tracing import Trace
def test_client_request_update_cookies(
loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture
) -> None:
- req = ClientRequest("get", URL("http://python.org"), loop=loop)
- morsel: "Morsel[str]" = Morsel()
- morsel.set(key="string", val="Another string", coded_val="really")
- morsel_cookie = {"str": morsel}
+ url = URL("http://python.org")
+
+ async def setup():
+ cookie_jar = CookieJar()
+ cookie_jar.update_cookies({"string": "Another string"})
+ cookies = cookie_jar.filter_cookies(url)
+ assert cookies["string"].value == "Another string"
+ req = ClientRequest("get", url, loop=loop)
+ return req, cookies
+
+ req, cookies = loop.run_until_complete(setup())
@benchmark
def _run() -> None:
- req.update_cookies(cookies=morsel_cookie)
+ req.update_cookies(cookies=cookies)
def test_create_client_request_with_cookies(
@@ -29,15 +40,39 @@ def test_create_client_request_with_cookies(
) -> None:
url = URL("http://python.org")
+ async def setup():
+ cookie_jar = CookieJar()
+ cookie_jar.update_cookies({"cookie": "value"})
+ cookies = cookie_jar.filter_cookies(url)
+ assert cookies["cookie"].value == "value"
+ return cookies
+
+ cookies = loop.run_until_complete(setup())
+ timer = TimerNoop()
+ traces: list[Trace] = []
+ headers = CIMultiDict[str]()
+
@benchmark
def _run() -> None:
ClientRequest(
method="get",
url=url,
loop=loop,
- headers=None,
+ params=None,
+ skip_auto_headers=None,
+ response_class=ClientResponse,
+ proxy=None,
+ proxy_auth=None,
+ proxy_headers=None,
+ timer=timer,
+ session=None,
+ ssl=True,
+ traces=traces,
+ trust_env=False,
+ server_hostname=None,
+ headers=headers,
data=None,
- cookies={"cookie": "value"},
+ cookies=cookies,
auth=None,
version=HttpVersion11,
compress=False,
@@ -50,6 +85,10 @@ def test_create_client_request_with_headers(
loop: asyncio.AbstractEventLoop, benchmark: BenchmarkFixture
) -> None:
url = URL("http://python.org")
+ timer = TimerNoop()
+ traces: list[Trace] = []
+ headers = CIMultiDict({"header": "value", "another": "header"})
+ cookies = BaseCookie[str]()
@benchmark
def _run() -> None:
@@ -57,9 +96,21 @@ def _run() -> None:
method="get",
url=url,
loop=loop,
- headers={"header": "value", "another": "header"},
+ params=None,
+ skip_auto_headers=None,
+ response_class=ClientResponse,
+ proxy=None,
+ proxy_auth=None,
+ proxy_headers=None,
+ timer=timer,
+ session=None,
+ ssl=True,
+ traces=traces,
+ trust_env=False,
+ server_hostname=None,
+ headers=headers,
data=None,
- cookies=None,
+ cookies=cookies,
auth=None,
version=HttpVersion11,
compress=False,
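The readjusted baselines above pre-build their inputs (a TimerNoop, an empty trace list, and cookies filtered through a CookieJar) so the benchmark loop measures only ClientRequest construction. The cookie setup can be reproduced standalone; a small sketch, with the URL and cookie values illustrative:

    import asyncio

    from yarl import URL
    from aiohttp.cookiejar import CookieJar

    async def build_cookies():
        # filter_cookies() returns a BaseCookie holding the cookies that
        # would be sent to this URL -- the same shape that
        # ClientRequest.update_cookies() receives from a real session.
        jar = CookieJar()
        jar.update_cookies({"cookie": "value"})
        return jar.filter_cookies(URL("http://python.org"))

    loop = asyncio.new_event_loop()
    cookies = loop.run_until_complete(build_cookies())
    loop.close()
    assert cookies["cookie"].value == "value"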
From c6f67b03363534541d5834bb161ee25161334cd9 Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Mon, 26 May 2025 09:00:15 -0500
Subject: [PATCH 09/13] [PR #11020/852297c backport][3.12] Cleanup some type
ignores in the client request tests (#11026)
---
tests/test_client_request.py | 6 ++++--
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/tests/test_client_request.py b/tests/test_client_request.py
index f880bb0859f..e8381a3ef77 100644
--- a/tests/test_client_request.py
+++ b/tests/test_client_request.py
@@ -751,7 +751,8 @@ async def test_post_data(loop: asyncio.AbstractEventLoop, conn: mock.Mock) -> No
)
resp = await req.send(conn)
assert "/" == req.url.path
- assert b"life=42" == req.body._value # type: ignore[union-attr]
+ assert isinstance(req.body, payload.Payload)
+ assert b"life=42" == req.body._value
assert "application/x-www-form-urlencoded" == req.headers["CONTENT-TYPE"]
await req.close()
resp.close()
@@ -788,7 +789,8 @@ async def test_get_with_data(loop) -> None:
meth, URL("http://python.org/"), data={"life": "42"}, loop=loop
)
assert "/" == req.url.path
- assert b"life=42" == req.body._value # type: ignore[union-attr]
+ assert isinstance(req.body, payload.Payload)
+ assert b"life=42" == req.body._value
await req.close()
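The cleanup above swaps "# type: ignore[union-attr]" for an isinstance() assertion: since ClientRequest.body is typed as Union[payload.Payload, Literal[b""]], asserting the payload case both checks it at runtime and lets the type checker narrow the union. A generic sketch of the pattern (the Body and describe names are illustrative, not aiohttp code):

    from typing import Union

    class Body:
        _value = b"life=42"

    def describe(body: Union[Body, bytes]) -> bytes:
        # assert isinstance(...) narrows `body` to Body for the type
        # checker, so no "# type: ignore[union-attr]" is needed below.
        assert isinstance(body, Body)
        return body._value

    print(describe(Body()))  # b'life=42'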
From 824d387805118ce4a5749c3de52addc1b2a1e998 Mon Sep 17 00:00:00 2001
From: Sam Bull
Date: Mon, 26 May 2025 15:22:07 +0100
Subject: [PATCH 10/13] Move ClientResponse to top of file (#11029)
---
aiohttp/client_reqrep.py | 2066 +++++++++++++++++++-------------------
1 file changed, 1032 insertions(+), 1034 deletions(-)
diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py
index 614751a17bd..41acec87712 100644
--- a/aiohttp/client_reqrep.py
+++ b/aiohttp/client_reqrep.py
@@ -94,6 +94,7 @@
from .tracing import Trace
+_CONNECTION_CLOSED_EXCEPTION = ClientConnectionError("Connection closed")
_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json")
@@ -272,1138 +273,1222 @@ def _warn_if_unclosed_payload(payload: payload.Payload, stacklevel: int = 2) ->
)
-class ClientRequest:
- GET_METHODS = {
- hdrs.METH_GET,
- hdrs.METH_HEAD,
- hdrs.METH_OPTIONS,
- hdrs.METH_TRACE,
- }
- POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
- ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE})
-
- DEFAULT_HEADERS = {
- hdrs.ACCEPT: "*/*",
- hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(),
- }
-
- # Type of body depends on PAYLOAD_REGISTRY, which is dynamic.
- _body: Union[None, payload.Payload] = None
- auth = None
- response = None
+class ClientResponse(HeadersMixin):
- __writer: Optional["asyncio.Task[None]"] = None # async task for streaming data
+ # Some of these attributes are None when created,
+ # but will be set by the start() method.
+ # As the end user will likely never see the None values, we cheat the types below.
+ # from the Status-Line of the response
+ version: Optional[HttpVersion] = None # HTTP-Version
+ status: int = None # type: ignore[assignment] # Status-Code
+ reason: Optional[str] = None # Reason-Phrase
- # These class defaults help create_autospec() work correctly.
- # If autospec is improved in future, maybe these can be removed.
- url = URL()
- method = "GET"
+ content: StreamReader = None # type: ignore[assignment] # Payload stream
+ _body: Optional[bytes] = None
+ _headers: CIMultiDictProxy[str] = None # type: ignore[assignment]
+ _history: Tuple["ClientResponse", ...] = ()
+ _raw_headers: RawHeaders = None # type: ignore[assignment]
- _continue = None # waiter future for '100 Continue' response
+ _connection: Optional["Connection"] = None # current connection
+ _cookies: Optional[SimpleCookie] = None
+ _continue: Optional["asyncio.Future[bool]"] = None
+ _source_traceback: Optional[traceback.StackSummary] = None
+ _session: Optional["ClientSession"] = None
+ # set up by ClientRequest after ClientResponse object creation
+ # the post-init stage avoids changing the ctor signature
+ _closed = True # to allow __del__ for non-initialized properly response
+ _released = False
+ _in_context = False
- _skip_auto_headers: Optional["CIMultiDict[None]"] = None
+ _resolve_charset: Callable[["ClientResponse", bytes], str] = lambda *_: "utf-8"
- # N.B.
- # Adding a __del__ method that closes self._writer doesn't make sense
- # because _writer is an instance method, so it keeps a reference to self.
- # The finalizer will not be called until the writer has finished.
+ __writer: Optional["asyncio.Task[None]"] = None
def __init__(
self,
method: str,
url: URL,
*,
- params: Query = None,
- headers: Optional[LooseHeaders] = None,
- skip_auto_headers: Optional[Iterable[str]] = None,
- data: Any = None,
- cookies: Optional[LooseCookies] = None,
- auth: Optional[BasicAuth] = None,
- version: http.HttpVersion = http.HttpVersion11,
- compress: Union[str, bool, None] = None,
- chunked: Optional[bool] = None,
- expect100: bool = False,
- loop: Optional[asyncio.AbstractEventLoop] = None,
- response_class: Optional[Type["ClientResponse"]] = None,
- proxy: Optional[URL] = None,
- proxy_auth: Optional[BasicAuth] = None,
- timer: Optional[BaseTimerContext] = None,
- session: Optional["ClientSession"] = None,
- ssl: Union[SSLContext, bool, Fingerprint] = True,
- proxy_headers: Optional[LooseHeaders] = None,
- traces: Optional[List["Trace"]] = None,
- trust_env: bool = False,
- server_hostname: Optional[str] = None,
- ):
- if loop is None:
- loop = asyncio.get_event_loop()
- if match := _CONTAINS_CONTROL_CHAR_RE.search(method):
- raise ValueError(
- f"Method cannot contain non-token characters {method!r} "
- f"(found at least {match.group()!r})"
- )
+ writer: "Optional[asyncio.Task[None]]",
+ continue100: Optional["asyncio.Future[bool]"],
+ timer: BaseTimerContext,
+ request_info: RequestInfo,
+ traces: List["Trace"],
+ loop: asyncio.AbstractEventLoop,
+ session: "ClientSession",
+ ) -> None:
# URL forbids subclasses, so a simple type check is enough.
- assert type(url) is URL, url
- if proxy is not None:
- assert type(proxy) is URL, proxy
- # FIXME: session is None in tests only, need to fix tests
- # assert session is not None
- if TYPE_CHECKING:
- assert session is not None
- self._session = session
- if params:
- url = url.extend_query(params)
- self.original_url = url
- self.url = url.with_fragment(None) if url.raw_fragment else url
- self.method = method.upper()
- self.chunked = chunked
- self.compress = compress
- self.loop = loop
- self.length = None
- if response_class is None:
- real_response_class = ClientResponse
- else:
- real_response_class = response_class
- self.response_class: Type[ClientResponse] = real_response_class
- self._timer = timer if timer is not None else TimerNoop()
- self._ssl = ssl if ssl is not None else True
- self.server_hostname = server_hostname
+ assert type(url) is URL
+
+ self.method = method
+ self._real_url = url
+ self._url = url.with_fragment(None) if url.raw_fragment else url
+ if writer is not None:
+ self._writer = writer
+ if continue100 is not None:
+ self._continue = continue100
+ self._request_info = request_info
+ self._timer = timer if timer is not None else TimerNoop()
+ self._cache: Dict[str, Any] = {}
+ self._traces = traces
+ self._loop = loop
+ # Save reference to _resolve_charset, so that get_encoding() will still
+ # work after the response has finished reading the body.
+ # TODO: Fix session=None in tests (see ClientRequest.__init__).
+ if session is not None:
+ # store a reference to session #1985
+ self._session = session
+ self._resolve_charset = session._resolve_charset
if loop.get_debug():
self._source_traceback = traceback.extract_stack(sys._getframe(1))
- self.update_version(version)
- self.update_host(url)
- self.update_headers(headers)
- self.update_auto_headers(skip_auto_headers)
- self.update_cookies(cookies)
- self.update_content_encoding(data)
- self.update_auth(auth, trust_env)
- self.update_proxy(proxy, proxy_auth, proxy_headers)
-
- self.update_body_from_data(data)
- if data is not None or self.method not in self.GET_METHODS:
- self.update_transfer_encoding()
- self.update_expect_continue(expect100)
- self._traces = [] if traces is None else traces
-
def __reset_writer(self, _: object = None) -> None:
self.__writer = None
- def _get_content_length(self) -> Optional[int]:
- """Extract and validate Content-Length header value.
-
- Returns parsed Content-Length value or None if not set.
- Raises ValueError if header exists but cannot be parsed as an integer.
- """
- if hdrs.CONTENT_LENGTH not in self.headers:
- return None
-
- content_length_hdr = self.headers[hdrs.CONTENT_LENGTH]
- try:
- return int(content_length_hdr)
- except ValueError:
- raise ValueError(
- f"Invalid Content-Length header: {content_length_hdr}"
- ) from None
-
- @property
- def skip_auto_headers(self) -> CIMultiDict[None]:
- return self._skip_auto_headers or CIMultiDict()
-
@property
def _writer(self) -> Optional["asyncio.Task[None]"]:
+ """The writer task for streaming data.
+
+ _writer is only provided for backwards compatibility
+ for subclasses that may need to access it.
+ """
return self.__writer
@_writer.setter
- def _writer(self, writer: "asyncio.Task[None]") -> None:
+ def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None:
+ """Set the writer task for streaming data."""
if self.__writer is not None:
self.__writer.remove_done_callback(self.__reset_writer)
self.__writer = writer
- writer.add_done_callback(self.__reset_writer)
-
- def is_ssl(self) -> bool:
- return self.url.scheme in _SSL_SCHEMES
-
- @property
- def ssl(self) -> Union["SSLContext", bool, Fingerprint]:
- return self._ssl
-
- @property
- def connection_key(self) -> ConnectionKey:
- if proxy_headers := self.proxy_headers:
- h: Optional[int] = hash(tuple(proxy_headers.items()))
+ if writer is None:
+ return
+ if writer.done():
+ # The writer is already done, so we can clear it immediately.
+ self.__writer = None
else:
- h = None
- url = self.url
- return tuple.__new__(
- ConnectionKey,
- (
- url.raw_host or "",
- url.port,
- url.scheme in _SSL_SCHEMES,
- self._ssl,
- self.proxy,
- self.proxy_auth,
- h,
- ),
- )
+ writer.add_done_callback(self.__reset_writer)
@property
- def host(self) -> str:
- ret = self.url.raw_host
- assert ret is not None
- return ret
+ def cookies(self) -> SimpleCookie:
+ if self._cookies is None:
+ self._cookies = SimpleCookie()
+ return self._cookies
- @property
- def port(self) -> Optional[int]:
- return self.url.port
+ @cookies.setter
+ def cookies(self, cookies: SimpleCookie) -> None:
+ self._cookies = cookies
- @property
- def body(self) -> Union[payload.Payload, Literal[b""]]:
- """Request body."""
- # empty body is represented as bytes for backwards compatibility
- return self._body or b""
+ @reify
+ def url(self) -> URL:
+ return self._url
- @body.setter
- def body(self, value: Any) -> None:
- """Set request body with warning for non-autoclose payloads.
+ @reify
+ def url_obj(self) -> URL:
+ warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2)
+ return self._url
- WARNING: This setter must be called from within an event loop and is not
- thread-safe. Setting body outside of an event loop may raise RuntimeError
- when closing file-based payloads.
+ @reify
+ def real_url(self) -> URL:
+ return self._real_url
- DEPRECATED: Direct assignment to body is deprecated and will be removed
- in a future version. Use await update_body() instead for proper resource
- management.
- """
- # Close existing payload if present
- if self._body is not None:
- # Warn if the payload needs manual closing
- # stacklevel=3: user code -> body setter -> _warn_if_unclosed_payload
- _warn_if_unclosed_payload(self._body, stacklevel=3)
- # NOTE: In the future, when we remove sync close support,
- # this setter will need to be removed and only the async
- # update_body() method will be available. For now, we call
- # _close() for backwards compatibility.
- self._body._close()
- self._update_body(value)
+ @reify
+ def host(self) -> str:
+ assert self._url.host is not None
+ return self._url.host
- @property
- def request_info(self) -> RequestInfo:
- headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers)
- # These are created on every request, so we use a NamedTuple
- # for performance reasons. We don't use the RequestInfo.__new__
- # method because it has a different signature which is provided
- # for backwards compatibility only.
- return tuple.__new__(
- RequestInfo, (self.url, self.method, headers, self.original_url)
- )
+ @reify
+ def headers(self) -> "CIMultiDictProxy[str]":
+ return self._headers
- @property
- def session(self) -> "ClientSession":
- """Return the ClientSession instance.
+ @reify
+ def raw_headers(self) -> RawHeaders:
+ return self._raw_headers
- This property provides access to the ClientSession that initiated
- this request, allowing middleware to make additional requests
- using the same session.
- """
- return self._session
+ @reify
+ def request_info(self) -> RequestInfo:
+ return self._request_info
- def update_host(self, url: URL) -> None:
- """Update destination host, port and connection type (ssl)."""
- # get host/port
- if not url.raw_host:
- raise InvalidURL(url)
+ @reify
+ def content_disposition(self) -> Optional[ContentDisposition]:
+ raw = self._headers.get(hdrs.CONTENT_DISPOSITION)
+ if raw is None:
+ return None
+ disposition_type, params_dct = multipart.parse_content_disposition(raw)
+ params = MappingProxyType(params_dct)
+ filename = multipart.content_disposition_filename(params)
+ return ContentDisposition(disposition_type, params, filename)
- # basic auth info
- if url.raw_user or url.raw_password:
- self.auth = helpers.BasicAuth(url.user or "", url.password or "")
+ def __del__(self, _warnings: Any = warnings) -> None:
+ if self._closed:
+ return
- def update_version(self, version: Union[http.HttpVersion, str]) -> None:
- """Convert request version to two elements tuple.
+ if self._connection is not None:
+ self._connection.release()
+ self._cleanup_writer()
- parses HTTP version '1.1' => (1, 1)
- """
- if isinstance(version, str):
- v = [part.strip() for part in version.split(".", 1)]
- try:
- version = http.HttpVersion(int(v[0]), int(v[1]))
- except ValueError:
- raise ValueError(
- f"Can not parse http version number: {version}"
- ) from None
- self.version = version
+ if self._loop.get_debug():
+ kwargs = {"source": self}
+ _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs)
+ context = {"client_response": self, "message": "Unclosed response"}
+ if self._source_traceback:
+ context["source_traceback"] = self._source_traceback
+ self._loop.call_exception_handler(context)
- def update_headers(self, headers: Optional[LooseHeaders]) -> None:
- """Update request headers."""
- self.headers: CIMultiDict[str] = CIMultiDict()
+ def __repr__(self) -> str:
+ out = io.StringIO()
+ ascii_encodable_url = str(self.url)
+ if self.reason:
+ ascii_encodable_reason = self.reason.encode(
+ "ascii", "backslashreplace"
+ ).decode("ascii")
+ else:
+ ascii_encodable_reason = "None"
+ print(
+ "".format(
+ ascii_encodable_url, self.status, ascii_encodable_reason
+ ),
+ file=out,
+ )
+ print(self.headers, file=out)
+ return out.getvalue()
- # Build the host header
- host = self.url.host_port_subcomponent
+ @property
+ def connection(self) -> Optional["Connection"]:
+ return self._connection
- # host_port_subcomponent is None when the URL is a relative URL.
- # but we know we do not have a relative URL here.
- assert host is not None
- self.headers[hdrs.HOST] = host
+ @reify
+ def history(self) -> Tuple["ClientResponse", ...]:
+ """A sequence of of responses, if redirects occurred."""
+ return self._history
- if not headers:
- return
+ @reify
+ def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":
+ links_str = ", ".join(self.headers.getall("link", []))
- if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
- headers = headers.items()
+ if not links_str:
+ return MultiDictProxy(MultiDict())
- for key, value in headers: # type: ignore[misc]
- # A special case for Host header
- if key in hdrs.HOST_ALL:
- self.headers[key] = value
- else:
- self.headers.add(key, value)
+ links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict()
- def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None:
- if skip_auto_headers is not None:
- self._skip_auto_headers = CIMultiDict(
- (hdr, None) for hdr in sorted(skip_auto_headers)
- )
- used_headers = self.headers.copy()
- used_headers.extend(self._skip_auto_headers) # type: ignore[arg-type]
- else:
- # Fast path when there are no headers to skip
- # which is the most common case.
- used_headers = self.headers
+ for val in re.split(r",(?=\s*<)", links_str):
+ match = re.match(r"\s*<(.*)>(.*)", val)
+ if match is None: # pragma: no cover
+ # the check exists to suppress mypy error
+ continue
+ url, params_str = match.groups()
+ params = params_str.split(";")[1:]
- for hdr, val in self.DEFAULT_HEADERS.items():
- if hdr not in used_headers:
- self.headers[hdr] = val
+ link: MultiDict[Union[str, URL]] = MultiDict()
- if hdrs.USER_AGENT not in used_headers:
- self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE
+ for param in params:
+ match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
+ if match is None: # pragma: no cover
+ # the check exists to suppress mypy error
+ continue
+ key, _, value, _ = match.groups()
- def update_cookies(self, cookies: Optional[LooseCookies]) -> None:
- """Update request cookies header."""
- if not cookies:
- return
+ link.add(key, value)
- c = SimpleCookie()
- if hdrs.COOKIE in self.headers:
- c.load(self.headers.get(hdrs.COOKIE, ""))
- del self.headers[hdrs.COOKIE]
+ key = link.get("rel", url)
- if isinstance(cookies, Mapping):
- iter_cookies = cookies.items()
- else:
- iter_cookies = cookies # type: ignore[assignment]
- for name, value in iter_cookies:
- if isinstance(value, Morsel):
- # Preserve coded_value
- mrsl_val = value.get(value.key, Morsel())
- mrsl_val.set(value.key, value.value, value.coded_value)
- c[name] = mrsl_val
- else:
- c[name] = value # type: ignore[assignment]
+ link.add("url", self.url.join(URL(url)))
- self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip()
+ links.add(str(key), MultiDictProxy(link))
- def update_content_encoding(self, data: Any) -> None:
- """Set request content encoding."""
- if not data:
- # Don't compress an empty body.
- self.compress = None
- return
+ return MultiDictProxy(links)
- if self.headers.get(hdrs.CONTENT_ENCODING):
- if self.compress:
- raise ValueError(
- "compress can not be set if Content-Encoding header is set"
- )
- elif self.compress:
- if not isinstance(self.compress, str):
- self.compress = "deflate"
- self.headers[hdrs.CONTENT_ENCODING] = self.compress
- self.chunked = True # enable chunked, no need to deal with length
+ async def start(self, connection: "Connection") -> "ClientResponse":
+ """Start response processing."""
+ self._closed = False
+ self._protocol = connection.protocol
+ self._connection = connection
- def update_transfer_encoding(self) -> None:
- """Analyze transfer-encoding header."""
- te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower()
+ with self._timer:
+ while True:
+ # read response
+ try:
+ protocol = self._protocol
+ message, payload = await protocol.read() # type: ignore[union-attr]
+ except http.HttpProcessingError as exc:
+ raise ClientResponseError(
+ self.request_info,
+ self.history,
+ status=exc.code,
+ message=exc.message,
+ headers=exc.headers,
+ ) from exc
- if "chunked" in te:
- if self.chunked:
- raise ValueError(
- "chunked can not be set "
- 'if "Transfer-Encoding: chunked" header is set'
- )
+ if message.code < 100 or message.code > 199 or message.code == 101:
+ break
- elif self.chunked:
- if hdrs.CONTENT_LENGTH in self.headers:
- raise ValueError(
- "chunked can not be set if Content-Length header is set"
- )
+ if self._continue is not None:
+ set_result(self._continue, True)
+ self._continue = None
- self.headers[hdrs.TRANSFER_ENCODING] = "chunked"
- elif (
- self._body is not None
- and hdrs.CONTENT_LENGTH not in self.headers
- and (size := self._body.size) is not None
- ):
- self.headers[hdrs.CONTENT_LENGTH] = str(size)
+ # payload eof handler
+ payload.on_eof(self._response_eof)
- def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None:
- """Set basic auth."""
- if auth is None:
- auth = self.auth
- if auth is None and trust_env and self.url.host is not None:
- netrc_obj = netrc_from_env()
- with contextlib.suppress(LookupError):
- auth = basicauth_from_netrc(netrc_obj, self.url.host)
- if auth is None:
- return
+ # response status
+ self.version = message.version
+ self.status = message.code
+ self.reason = message.reason
- if not isinstance(auth, helpers.BasicAuth):
- raise TypeError("BasicAuth() tuple is required instead")
+ # headers
+ self._headers = message.headers # type is CIMultiDictProxy
+ self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes]
- self.headers[hdrs.AUTHORIZATION] = auth.encode()
+ # payload
+ self.content = payload
- def update_body_from_data(self, body: Any, _stacklevel: int = 3) -> None:
- """Update request body from data."""
- if self._body is not None:
- _warn_if_unclosed_payload(self._body, stacklevel=_stacklevel)
+ # cookies
+ if cookie_hdrs := self.headers.getall(hdrs.SET_COOKIE, ()):
+ cookies = SimpleCookie()
+ for hdr in cookie_hdrs:
+ try:
+ cookies.load(hdr)
+ except CookieError as exc:
+ client_logger.warning("Can not load response cookies: %s", exc)
+ self._cookies = cookies
+ return self
- if body is None:
- self._body = None
+ def _response_eof(self) -> None:
+ if self._closed:
return
- # FormData
- maybe_payload = body() if isinstance(body, FormData) else body
+ # protocol could be None because connection could be detached
+ protocol = self._connection and self._connection.protocol
+ if protocol is not None and protocol.upgraded:
+ return
- try:
- body_payload = payload.PAYLOAD_REGISTRY.get(maybe_payload, disposition=None)
- except payload.LookupError:
- body_payload = FormData(maybe_payload)() # type: ignore[arg-type]
+ self._closed = True
+ self._cleanup_writer()
+ self._release_connection()
- self._body = body_payload
- # enable chunked encoding if needed
- if not self.chunked and hdrs.CONTENT_LENGTH not in self.headers:
- if (size := body_payload.size) is not None:
- self.headers[hdrs.CONTENT_LENGTH] = str(size)
- else:
- self.chunked = True
+ @property
+ def closed(self) -> bool:
+ return self._closed
- # copy payload headers
- assert body_payload.headers
- headers = self.headers
- skip_headers = self._skip_auto_headers
- for key, value in body_payload.headers.items():
- if key in headers or (skip_headers is not None and key in skip_headers):
- continue
- headers[key] = value
+ def close(self) -> None:
+ if not self._released:
+ self._notify_content()
- def _update_body(self, body: Any) -> None:
- """Update request body after its already been set."""
- # Remove existing Content-Length header since body is changing
- if hdrs.CONTENT_LENGTH in self.headers:
- del self.headers[hdrs.CONTENT_LENGTH]
+ self._closed = True
+ if self._loop is None or self._loop.is_closed():
+ return
- # Remove existing Transfer-Encoding header to avoid conflicts
- if self.chunked and hdrs.TRANSFER_ENCODING in self.headers:
- del self.headers[hdrs.TRANSFER_ENCODING]
+ self._cleanup_writer()
+ if self._connection is not None:
+ self._connection.close()
+ self._connection = None
- # Now update the body using the existing method
- # Called from _update_body, add 1 to stacklevel from caller
- self.update_body_from_data(body, _stacklevel=4)
+ def release(self) -> Any:
+ if not self._released:
+ self._notify_content()
- # Update transfer encoding headers if needed (same logic as __init__)
- if body is not None or self.method not in self.GET_METHODS:
- self.update_transfer_encoding()
+ self._closed = True
- async def update_body(self, body: Any) -> None:
+ self._cleanup_writer()
+ self._release_connection()
+ return noop()
+
+ @property
+ def ok(self) -> bool:
+ """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not.
+
+ This is **not** a check for ``200 OK`` but a check that the response
+ status is under 400.
"""
- Update request body and close previous payload if needed.
+ return 400 > self.status
- This method safely updates the request body by first closing any existing
- payload to prevent resource leaks, then setting the new body.
+ def raise_for_status(self) -> None:
+ if not self.ok:
+ # reason should always be not None for a started response
+ assert self.reason is not None
- IMPORTANT: Always use this method instead of setting request.body directly.
- Direct assignment to request.body will leak resources if the previous body
- contains file handles, streams, or other resources that need cleanup.
+ # If we're in a context we can rely on __aexit__() to release as the
+ # exception propagates.
+ if not self._in_context:
+ self.release()
- Args:
- body: The new body content. Can be:
- - bytes/bytearray: Raw binary data
- - str: Text data (will be encoded using charset from Content-Type)
- - FormData: Form data that will be encoded as multipart/form-data
- - Payload: A pre-configured payload object
- - AsyncIterable: An async iterable of bytes chunks
- - File-like object: Will be read and sent as binary data
- - None: Clears the body
+ raise ClientResponseError(
+ self.request_info,
+ self.history,
+ status=self.status,
+ message=self.reason,
+ headers=self.headers,
+ )
- Usage:
- # CORRECT: Use update_body
- await request.update_body(b"new request data")
+ def _release_connection(self) -> None:
+ if self._connection is not None:
+ if self.__writer is None:
+ self._connection.release()
+ self._connection = None
+ else:
+ self.__writer.add_done_callback(lambda f: self._release_connection())
- # WRONG: Don't set body directly
- # request.body = b"new request data" # This will leak resources!
+ async def _wait_released(self) -> None:
+ if self.__writer is not None:
+ try:
+ await self.__writer
+ except asyncio.CancelledError:
+ if (
+ sys.version_info >= (3, 11)
+ and (task := asyncio.current_task())
+ and task.cancelling()
+ ):
+ raise
+ self._release_connection()
- # Update with form data
- form_data = FormData()
- form_data.add_field('field', 'value')
- await request.update_body(form_data)
+ def _cleanup_writer(self) -> None:
+ if self.__writer is not None:
+ self.__writer.cancel()
+ self._session = None
- # Clear body
- await request.update_body(None)
+ def _notify_content(self) -> None:
+ content = self.content
+ if content and content.exception() is None:
+ set_exception(content, _CONNECTION_CLOSED_EXCEPTION)
+ self._released = True
- Note:
- This method is async because it may need to close file handles or
- other resources associated with the previous payload. Always await
- this method to ensure proper cleanup.
+ async def wait_for_close(self) -> None:
+ if self.__writer is not None:
+ try:
+ await self.__writer
+ except asyncio.CancelledError:
+ if (
+ sys.version_info >= (3, 11)
+ and (task := asyncio.current_task())
+ and task.cancelling()
+ ):
+ raise
+ self.release()
- Warning:
- Setting request.body directly is highly discouraged and can lead to:
- - Resource leaks (unclosed file handles, streams)
- - Memory leaks (unreleased buffers)
- - Unexpected behavior with streaming payloads
+ async def read(self) -> bytes:
+ """Read response payload."""
+ if self._body is None:
+ try:
+ self._body = await self.content.read()
+ for trace in self._traces:
+ await trace.send_response_chunk_received(
+ self.method, self.url, self._body
+ )
+ except BaseException:
+ self.close()
+ raise
+ elif self._released: # Response explicitly released
+ raise ClientConnectionError("Connection closed")
- It is not recommended to change the payload type in middleware. If the
- body was already set (e.g., as bytes), it's best to keep the same type
- rather than converting it (e.g., to str) as this may result in unexpected
- behavior.
+ protocol = self._connection and self._connection.protocol
+ if protocol is None or not protocol.upgraded:
+ await self._wait_released() # Underlying connection released
+ return self._body
- See Also:
- - update_body_from_data: Synchronous body update without cleanup
- - body property: Direct body access (STRONGLY DISCOURAGED)
+ def get_encoding(self) -> str:
+ ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
+ mimetype = helpers.parse_mimetype(ctype)
- """
- # Close existing payload if it exists and needs closing
- if self._body is not None:
- await self._body.close()
- self._update_body(body)
+ encoding = mimetype.parameters.get("charset")
+ if encoding:
+ with contextlib.suppress(LookupError, ValueError):
+ return codecs.lookup(encoding).name
- def update_expect_continue(self, expect: bool = False) -> None:
- if expect:
- self.headers[hdrs.EXPECT] = "100-continue"
- elif (
- hdrs.EXPECT in self.headers
- and self.headers[hdrs.EXPECT].lower() == "100-continue"
+ if mimetype.type == "application" and (
+ mimetype.subtype == "json" or mimetype.subtype == "rdap"
):
- expect = True
+ # RFC 7159 states that the default encoding is UTF-8.
+ # RFC 7483 defines application/rdap+json
+ return "utf-8"
- if expect:
- self._continue = self.loop.create_future()
+ if self._body is None:
+ raise RuntimeError(
+ "Cannot compute fallback encoding of a not yet read body"
+ )
- def update_proxy(
- self,
- proxy: Optional[URL],
- proxy_auth: Optional[BasicAuth],
- proxy_headers: Optional[LooseHeaders],
- ) -> None:
- self.proxy = proxy
- if proxy is None:
- self.proxy_auth = None
- self.proxy_headers = None
- return
+ return self._resolve_charset(self, self._body)
- if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
- raise ValueError("proxy_auth must be None or BasicAuth() tuple")
- self.proxy_auth = proxy_auth
+ async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str:
+ """Read response payload and decode."""
+ if self._body is None:
+ await self.read()
- if proxy_headers is not None and not isinstance(
- proxy_headers, (MultiDict, MultiDictProxy)
- ):
- proxy_headers = CIMultiDict(proxy_headers)
- self.proxy_headers = proxy_headers
+ if encoding is None:
+ encoding = self.get_encoding()
- async def write_bytes(
+ return self._body.decode(encoding, errors=errors) # type: ignore[union-attr]
+
+ async def json(
self,
- writer: AbstractStreamWriter,
- conn: "Connection",
- content_length: Optional[int],
+ *,
+ encoding: Optional[str] = None,
+ loads: JSONDecoder = DEFAULT_JSON_DECODER,
+ content_type: Optional[str] = "application/json",
+ ) -> Any:
+ """Read and decodes JSON response."""
+ if self._body is None:
+ await self.read()
+
+ if content_type:
+ ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
+ if not _is_expected_content_type(ctype, content_type):
+ raise ContentTypeError(
+ self.request_info,
+ self.history,
+ status=self.status,
+ message=(
+ "Attempt to decode JSON with unexpected mimetype: %s" % ctype
+ ),
+ headers=self.headers,
+ )
+
+ stripped = self._body.strip() # type: ignore[union-attr]
+ if not stripped:
+ return None
+
+ if encoding is None:
+ encoding = self.get_encoding()
+
+ return loads(stripped.decode(encoding))
+
+ async def __aenter__(self) -> "ClientResponse":
+ self._in_context = True
+ return self
+
+ async def __aexit__(
+ self,
+ exc_type: Optional[Type[BaseException]],
+ exc_val: Optional[BaseException],
+ exc_tb: Optional[TracebackType],
) -> None:
- """
- Write the request body to the connection stream.
+ self._in_context = False
+ # similar to _RequestContextManager, we do not need to check
+ # for exceptions, response object can close connection
+ # if state is broken
+ self.release()
+ await self.wait_for_close()
- This method handles writing different types of request bodies:
- 1. Payload objects (using their specialized write_with_length method)
- 2. Bytes/bytearray objects
- 3. Iterable body content
- Args:
- writer: The stream writer to write the body to
- conn: The connection being used for this request
- content_length: Optional maximum number of bytes to write from the body
- (None means write the entire body)
+class ClientRequest:
+ GET_METHODS = {
+ hdrs.METH_GET,
+ hdrs.METH_HEAD,
+ hdrs.METH_OPTIONS,
+ hdrs.METH_TRACE,
+ }
+ POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
+ ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE})
- The method properly handles:
- - Waiting for 100-Continue responses if required
- - Content length constraints for chunked encoding
- - Error handling for network issues, cancellation, and other exceptions
- - Signaling EOF and timeout management
+ DEFAULT_HEADERS = {
+ hdrs.ACCEPT: "*/*",
+ hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(),
+ }
- Raises:
- ClientOSError: When there's an OS-level error writing the body
- ClientConnectionError: When there's a general connection error
- asyncio.CancelledError: When the operation is cancelled
+ # Type of body depends on PAYLOAD_REGISTRY, which is dynamic.
+ _body: Union[None, payload.Payload] = None
+ auth = None
+ response = None
- """
- # 100 response
- if self._continue is not None:
- # Force headers to be sent before waiting for 100-continue
- writer.send_headers()
- await writer.drain()
- await self._continue
+ __writer: Optional["asyncio.Task[None]"] = None # async task for streaming data
- protocol = conn.protocol
- assert protocol is not None
- try:
- # This should be a rare case but the
- # self._body can be set to None while
- # the task is being started or we wait above
- # for the 100-continue response.
- # The more likely case is we have an empty
- # payload, but 100-continue is still expected.
- if self._body is not None:
- await self._body.write_with_length(writer, content_length)
- except OSError as underlying_exc:
- reraised_exc = underlying_exc
+ # These class defaults help create_autospec() work correctly.
+ # If autospec is improved in future, maybe these can be removed.
+ url = URL()
+ method = "GET"
- # Distinguish between timeout and other OS errors for better error reporting
- exc_is_not_timeout = underlying_exc.errno is not None or not isinstance(
- underlying_exc, asyncio.TimeoutError
- )
- if exc_is_not_timeout:
- reraised_exc = ClientOSError(
- underlying_exc.errno,
- f"Can not write request body for {self.url !s}",
- )
+ _continue = None # waiter future for '100 Continue' response
- set_exception(protocol, reraised_exc, underlying_exc)
- except asyncio.CancelledError:
- # Body hasn't been fully sent, so connection can't be reused
- conn.close()
- raise
- except Exception as underlying_exc:
- set_exception(
- protocol,
- ClientConnectionError(
- "Failed to send bytes into the underlying connection "
- f"{conn !s}: {underlying_exc!r}",
- ),
- underlying_exc,
- )
- else:
- # Successfully wrote the body, signal EOF and start response timeout
- await writer.write_eof()
- protocol.start_timeout()
+ _skip_auto_headers: Optional["CIMultiDict[None]"] = None
- async def send(self, conn: "Connection") -> "ClientResponse":
- # Specify request target:
- # - CONNECT request must send authority form URI
- # - not CONNECT proxy must send absolute form URI
- # - most common is origin form URI
- if self.method == hdrs.METH_CONNECT:
- connect_host = self.url.host_subcomponent
- assert connect_host is not None
- path = f"{connect_host}:{self.url.port}"
- elif self.proxy and not self.is_ssl():
- path = str(self.url)
+ # N.B.
+ # Adding a __del__ method that closes self._writer doesn't make sense
+ # because _writer is an instance method, so it keeps a reference to self.
+ # The finalizer will not be called until the writer has finished.
+
+ def __init__(
+ self,
+ method: str,
+ url: URL,
+ *,
+ params: Query = None,
+ headers: Optional[LooseHeaders] = None,
+ skip_auto_headers: Optional[Iterable[str]] = None,
+ data: Any = None,
+ cookies: Optional[LooseCookies] = None,
+ auth: Optional[BasicAuth] = None,
+ version: http.HttpVersion = http.HttpVersion11,
+ compress: Union[str, bool, None] = None,
+ chunked: Optional[bool] = None,
+ expect100: bool = False,
+ loop: Optional[asyncio.AbstractEventLoop] = None,
+ response_class: Optional[Type["ClientResponse"]] = None,
+ proxy: Optional[URL] = None,
+ proxy_auth: Optional[BasicAuth] = None,
+ timer: Optional[BaseTimerContext] = None,
+ session: Optional["ClientSession"] = None,
+ ssl: Union[SSLContext, bool, Fingerprint] = True,
+ proxy_headers: Optional[LooseHeaders] = None,
+ traces: Optional[List["Trace"]] = None,
+ trust_env: bool = False,
+ server_hostname: Optional[str] = None,
+ ):
+ if loop is None:
+ loop = asyncio.get_event_loop()
+ if match := _CONTAINS_CONTROL_CHAR_RE.search(method):
+ raise ValueError(
+ f"Method cannot contain non-token characters {method!r} "
+ f"(found at least {match.group()!r})"
+ )
+ # URL forbids subclasses, so a simple type check is enough.
+ assert type(url) is URL, url
+ if proxy is not None:
+ assert type(proxy) is URL, proxy
+ # FIXME: session is None in tests only, need to fix tests
+ # assert session is not None
+ if TYPE_CHECKING:
+ assert session is not None
+ self._session = session
+ if params:
+ url = url.extend_query(params)
+ self.original_url = url
+ self.url = url.with_fragment(None) if url.raw_fragment else url
+ self.method = method.upper()
+ self.chunked = chunked
+ self.compress = compress
+ self.loop = loop
+ self.length = None
+ if response_class is None:
+ real_response_class = ClientResponse
else:
- path = self.url.raw_path_qs
+ real_response_class = response_class
+ self.response_class: Type[ClientResponse] = real_response_class
+ self._timer = timer if timer is not None else TimerNoop()
+ self._ssl = ssl if ssl is not None else True
+ self.server_hostname = server_hostname
- protocol = conn.protocol
- assert protocol is not None
- writer = StreamWriter(
- protocol,
- self.loop,
- on_chunk_sent=(
- functools.partial(self._on_chunk_request_sent, self.method, self.url)
- if self._traces
- else None
- ),
- on_headers_sent=(
- functools.partial(self._on_headers_request_sent, self.method, self.url)
- if self._traces
- else None
- ),
- )
+ if loop.get_debug():
+ self._source_traceback = traceback.extract_stack(sys._getframe(1))
- if self.compress:
- writer.enable_compression(self.compress) # type: ignore[arg-type]
+ self.update_version(version)
+ self.update_host(url)
+ self.update_headers(headers)
+ self.update_auto_headers(skip_auto_headers)
+ self.update_cookies(cookies)
+ self.update_content_encoding(data)
+ self.update_auth(auth, trust_env)
+ self.update_proxy(proxy, proxy_auth, proxy_headers)
- if self.chunked is not None:
- writer.enable_chunking()
+ self.update_body_from_data(data)
+ if data is not None or self.method not in self.GET_METHODS:
+ self.update_transfer_encoding()
+ self.update_expect_continue(expect100)
+ self._traces = [] if traces is None else traces
- # set default content-type
- if (
- self.method in self.POST_METHODS
- and (
- self._skip_auto_headers is None
- or hdrs.CONTENT_TYPE not in self._skip_auto_headers
- )
- and hdrs.CONTENT_TYPE not in self.headers
- ):
- self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream"
+ def __reset_writer(self, _: object = None) -> None:
+ self.__writer = None
- v = self.version
- if hdrs.CONNECTION not in self.headers:
- if conn._connector.force_close:
- if v == HttpVersion11:
- self.headers[hdrs.CONNECTION] = "close"
- elif v == HttpVersion10:
- self.headers[hdrs.CONNECTION] = "keep-alive"
+ def _get_content_length(self) -> Optional[int]:
+ """Extract and validate Content-Length header value.
- # status + headers
- status_line = f"{self.method} {path} HTTP/{v.major}.{v.minor}"
+ Returns parsed Content-Length value or None if not set.
+ Raises ValueError if header exists but cannot be parsed as an integer.
+ """
+ if hdrs.CONTENT_LENGTH not in self.headers:
+ return None
- # Buffer headers for potential coalescing with body
- await writer.write_headers(status_line, self.headers)
+ content_length_hdr = self.headers[hdrs.CONTENT_LENGTH]
+ try:
+ return int(content_length_hdr)
+ except ValueError:
+ raise ValueError(
+ f"Invalid Content-Length header: {content_length_hdr}"
+ ) from None
- task: Optional["asyncio.Task[None]"]
- if self._body or self._continue is not None or protocol.writing_paused:
- coro = self.write_bytes(writer, conn, self._get_content_length())
- if sys.version_info >= (3, 12):
- # Optimization for Python 3.12, try to write
- # bytes immediately to avoid having to schedule
- # the task on the event loop.
- task = asyncio.Task(coro, loop=self.loop, eager_start=True)
- else:
- task = self.loop.create_task(coro)
- if task.done():
- task = None
- else:
- self._writer = task
- else:
- # We have nothing to write because
- # - there is no body
- # - the protocol does not have writing paused
- # - we are not waiting for a 100-continue response
- protocol.start_timeout()
- writer.set_eof()
- task = None
- response_class = self.response_class
- assert response_class is not None
- self.response = response_class(
- self.method,
- self.original_url,
- writer=task,
- continue100=self._continue,
- timer=self._timer,
- request_info=self.request_info,
- traces=self._traces,
- loop=self.loop,
- session=self._session,
- )
- return self.response
+ @property
+ def skip_auto_headers(self) -> CIMultiDict[None]:
+ return self._skip_auto_headers or CIMultiDict()
- async def close(self) -> None:
- if self.__writer is not None:
- try:
- await self.__writer
- except asyncio.CancelledError:
- if (
- sys.version_info >= (3, 11)
- and (task := asyncio.current_task())
- and task.cancelling()
- ):
- raise
+ @property
+ def _writer(self) -> Optional["asyncio.Task[None]"]:
+ return self.__writer
- def terminate(self) -> None:
+ @_writer.setter
+ def _writer(self, writer: "asyncio.Task[None]") -> None:
if self.__writer is not None:
- if not self.loop.is_closed():
- self.__writer.cancel()
self.__writer.remove_done_callback(self.__reset_writer)
- self.__writer = None
-
- async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None:
- for trace in self._traces:
- await trace.send_request_chunk_sent(method, url, chunk)
-
- async def _on_headers_request_sent(
- self, method: str, url: URL, headers: "CIMultiDict[str]"
- ) -> None:
- for trace in self._traces:
- await trace.send_request_headers(method, url, headers)
+ self.__writer = writer
+ writer.add_done_callback(self.__reset_writer)
+ def is_ssl(self) -> bool:
+ return self.url.scheme in _SSL_SCHEMES
-_CONNECTION_CLOSED_EXCEPTION = ClientConnectionError("Connection closed")
+ @property
+ def ssl(self) -> Union["SSLContext", bool, Fingerprint]:
+ return self._ssl
+ @property
+ def connection_key(self) -> ConnectionKey:
+ if proxy_headers := self.proxy_headers:
+ h: Optional[int] = hash(tuple(proxy_headers.items()))
+ else:
+ h = None
+ url = self.url
+ return tuple.__new__(
+ ConnectionKey,
+ (
+ url.raw_host or "",
+ url.port,
+ url.scheme in _SSL_SCHEMES,
+ self._ssl,
+ self.proxy,
+ self.proxy_auth,
+ h,
+ ),
+ )
-class ClientResponse(HeadersMixin):
+ @property
+ def host(self) -> str:
+ ret = self.url.raw_host
+ assert ret is not None
+ return ret
- # Some of these attributes are None when created,
- # but will be set by the start() method.
- # As the end user will likely never see the None values, we cheat the types below.
- # from the Status-Line of the response
- version: Optional[HttpVersion] = None # HTTP-Version
- status: int = None # type: ignore[assignment] # Status-Code
- reason: Optional[str] = None # Reason-Phrase
+ @property
+ def port(self) -> Optional[int]:
+ return self.url.port
- content: StreamReader = None # type: ignore[assignment] # Payload stream
- _body: Optional[bytes] = None
- _headers: CIMultiDictProxy[str] = None # type: ignore[assignment]
- _history: Tuple["ClientResponse", ...] = ()
- _raw_headers: RawHeaders = None # type: ignore[assignment]
+ @property
+ def body(self) -> Union[payload.Payload, Literal[b""]]:
+ """Request body."""
+ # empty body is represented as bytes for backwards compatibility
+ return self._body or b""
- _connection: Optional["Connection"] = None # current connection
- _cookies: Optional[SimpleCookie] = None
- _continue: Optional["asyncio.Future[bool]"] = None
- _source_traceback: Optional[traceback.StackSummary] = None
- _session: Optional["ClientSession"] = None
- # set up by ClientRequest after ClientResponse object creation
- # the post-init stage avoids changing the ctor signature
- _closed = True # to allow __del__ for non-initialized properly response
- _released = False
- _in_context = False
+ @body.setter
+ def body(self, value: Any) -> None:
+ """Set request body with warning for non-autoclose payloads.
- _resolve_charset: Callable[["ClientResponse", bytes], str] = lambda *_: "utf-8"
+ WARNING: This setter must be called from within an event loop and is not
+ thread-safe. Setting body outside of an event loop may raise RuntimeError
+ when closing file-based payloads.
- __writer: Optional["asyncio.Task[None]"] = None
+ DEPRECATED: Direct assignment to body is deprecated and will be removed
+ in a future version. Use await update_body() instead for proper resource
+ management.
+ """
+ # Close existing payload if present
+ if self._body is not None:
+ # Warn if the payload needs manual closing
+ # stacklevel=3: user code -> body setter -> _warn_if_unclosed_payload
+ _warn_if_unclosed_payload(self._body, stacklevel=3)
+ # NOTE: In the future, when we remove sync close support,
+ # this setter will need to be removed and only the async
+ # update_body() method will be available. For now, we call
+ # _close() for backwards compatibility.
+ self._body._close()
+ self._update_body(value)
- def __init__(
- self,
- method: str,
- url: URL,
- *,
- writer: "Optional[asyncio.Task[None]]",
- continue100: Optional["asyncio.Future[bool]"],
- timer: BaseTimerContext,
- request_info: RequestInfo,
- traces: List["Trace"],
- loop: asyncio.AbstractEventLoop,
- session: "ClientSession",
- ) -> None:
- # URL forbids subclasses, so a simple type check is enough.
- assert type(url) is URL
+ @property
+ def request_info(self) -> RequestInfo:
+ headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers)
+ # These are created on every request, so we use a NamedTuple
+ # for performance reasons. We don't use the RequestInfo.__new__
+ # method because it has a different signature which is provided
+ # for backwards compatibility only.
+ return tuple.__new__(
+ RequestInfo, (self.url, self.method, headers, self.original_url)
+ )
- self.method = method
+ @property
+ def session(self) -> "ClientSession":
+ """Return the ClientSession instance.
- self._real_url = url
- self._url = url.with_fragment(None) if url.raw_fragment else url
- if writer is not None:
- self._writer = writer
- if continue100 is not None:
- self._continue = continue100
- self._request_info = request_info
- self._timer = timer if timer is not None else TimerNoop()
- self._cache: Dict[str, Any] = {}
- self._traces = traces
- self._loop = loop
- # Save reference to _resolve_charset, so that get_encoding() will still
- # work after the response has finished reading the body.
- # TODO: Fix session=None in tests (see ClientRequest.__init__).
- if session is not None:
- # store a reference to session #1985
- self._session = session
- self._resolve_charset = session._resolve_charset
- if loop.get_debug():
- self._source_traceback = traceback.extract_stack(sys._getframe(1))
+ This property provides access to the ClientSession that initiated
+ this request, allowing middleware to make additional requests
+ using the same session.
+ """
+ return self._session
- def __reset_writer(self, _: object = None) -> None:
- self.__writer = None
+ def update_host(self, url: URL) -> None:
+ """Update destination host, port and connection type (ssl)."""
+ # get host/port
+ if not url.raw_host:
+ raise InvalidURL(url)
- @property
- def _writer(self) -> Optional["asyncio.Task[None]"]:
- """The writer task for streaming data.
+ # basic auth info
+ if url.raw_user or url.raw_password:
+ self.auth = helpers.BasicAuth(url.user or "", url.password or "")
- _writer is only provided for backwards compatibility
- for subclasses that may need to access it.
+ def update_version(self, version: Union[http.HttpVersion, str]) -> None:
+ """Convert request version to two elements tuple.
+
+ parses HTTP version '1.1' => (1, 1)
"""
- return self.__writer
+ if isinstance(version, str):
+ v = [part.strip() for part in version.split(".", 1)]
+ try:
+ version = http.HttpVersion(int(v[0]), int(v[1]))
+ except ValueError:
+ raise ValueError(
+ f"Can not parse http version number: {version}"
+ ) from None
+ self.version = version
- @_writer.setter
- def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None:
- """Set the writer task for streaming data."""
- if self.__writer is not None:
- self.__writer.remove_done_callback(self.__reset_writer)
- self.__writer = writer
- if writer is None:
+ def update_headers(self, headers: Optional[LooseHeaders]) -> None:
+ """Update request headers."""
+ self.headers: CIMultiDict[str] = CIMultiDict()
+
+ # Build the host header
+ host = self.url.host_port_subcomponent
+
+ # host_port_subcomponent is None when the URL is a relative URL.
+ # but we know we do not have a relative URL here.
+ assert host is not None
+ self.headers[hdrs.HOST] = host
+
+ if not headers:
return
- if writer.done():
- # The writer is already done, so we can clear it immediately.
- self.__writer = None
+
+ if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
+ headers = headers.items()
+
+ for key, value in headers: # type: ignore[misc]
+ # A special case for Host header
+ if key in hdrs.HOST_ALL:
+ self.headers[key] = value
+ else:
+ self.headers.add(key, value)
+
+ def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None:
+ if skip_auto_headers is not None:
+ self._skip_auto_headers = CIMultiDict(
+ (hdr, None) for hdr in sorted(skip_auto_headers)
+ )
+ used_headers = self.headers.copy()
+ used_headers.extend(self._skip_auto_headers) # type: ignore[arg-type]
else:
- writer.add_done_callback(self.__reset_writer)
+ # Fast path when there are no headers to skip
+ # which is the most common case.
+ used_headers = self.headers
- @property
- def cookies(self) -> SimpleCookie:
- if self._cookies is None:
- self._cookies = SimpleCookie()
- return self._cookies
+ for hdr, val in self.DEFAULT_HEADERS.items():
+ if hdr not in used_headers:
+ self.headers[hdr] = val
- @cookies.setter
- def cookies(self, cookies: SimpleCookie) -> None:
- self._cookies = cookies
+ if hdrs.USER_AGENT not in used_headers:
+ self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE
- @reify
- def url(self) -> URL:
- return self._url
+ def update_cookies(self, cookies: Optional[LooseCookies]) -> None:
+ """Update request cookies header."""
+ if not cookies:
+ return
- @reify
- def url_obj(self) -> URL:
- warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2)
- return self._url
+ c = SimpleCookie()
+ if hdrs.COOKIE in self.headers:
+ c.load(self.headers.get(hdrs.COOKIE, ""))
+ del self.headers[hdrs.COOKIE]
- @reify
- def real_url(self) -> URL:
- return self._real_url
+ if isinstance(cookies, Mapping):
+ iter_cookies = cookies.items()
+ else:
+ iter_cookies = cookies # type: ignore[assignment]
+ for name, value in iter_cookies:
+ if isinstance(value, Morsel):
+ # Preserve coded_value
+ mrsl_val = value.get(value.key, Morsel())
+ mrsl_val.set(value.key, value.value, value.coded_value)
+ c[name] = mrsl_val
+ else:
+ c[name] = value # type: ignore[assignment]
- @reify
- def host(self) -> str:
- assert self._url.host is not None
- return self._url.host
+ self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip()
- @reify
- def headers(self) -> "CIMultiDictProxy[str]":
- return self._headers
+ def update_content_encoding(self, data: Any) -> None:
+ """Set request content encoding."""
+ if not data:
+ # Don't compress an empty body.
+ self.compress = None
+ return
- @reify
- def raw_headers(self) -> RawHeaders:
- return self._raw_headers
+ if self.headers.get(hdrs.CONTENT_ENCODING):
+ if self.compress:
+ raise ValueError(
+ "compress can not be set if Content-Encoding header is set"
+ )
+ elif self.compress:
+ if not isinstance(self.compress, str):
+ self.compress = "deflate"
+ self.headers[hdrs.CONTENT_ENCODING] = self.compress
+ self.chunked = True # enable chunked, no need to deal with length
- @reify
- def request_info(self) -> RequestInfo:
- return self._request_info
+ def update_transfer_encoding(self) -> None:
+ """Analyze transfer-encoding header."""
+ te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower()
- @reify
- def content_disposition(self) -> Optional[ContentDisposition]:
- raw = self._headers.get(hdrs.CONTENT_DISPOSITION)
- if raw is None:
- return None
- disposition_type, params_dct = multipart.parse_content_disposition(raw)
- params = MappingProxyType(params_dct)
- filename = multipart.content_disposition_filename(params)
- return ContentDisposition(disposition_type, params, filename)
+ if "chunked" in te:
+ if self.chunked:
+ raise ValueError(
+ "chunked can not be set "
+ 'if "Transfer-Encoding: chunked" header is set'
+ )
- def __del__(self, _warnings: Any = warnings) -> None:
- if self._closed:
+ elif self.chunked:
+ if hdrs.CONTENT_LENGTH in self.headers:
+ raise ValueError(
+ "chunked can not be set if Content-Length header is set"
+ )
+
+ self.headers[hdrs.TRANSFER_ENCODING] = "chunked"
+ elif (
+ self._body is not None
+ and hdrs.CONTENT_LENGTH not in self.headers
+ and (size := self._body.size) is not None
+ ):
+ self.headers[hdrs.CONTENT_LENGTH] = str(size)
+
+ def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None:
+ """Set basic auth."""
+ if auth is None:
+ auth = self.auth
+ if auth is None and trust_env and self.url.host is not None:
+ netrc_obj = netrc_from_env()
+ with contextlib.suppress(LookupError):
+ auth = basicauth_from_netrc(netrc_obj, self.url.host)
+ if auth is None:
return
- if self._connection is not None:
- self._connection.release()
- self._cleanup_writer()
+ if not isinstance(auth, helpers.BasicAuth):
+ raise TypeError("BasicAuth() tuple is required instead")
- if self._loop.get_debug():
- kwargs = {"source": self}
- _warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs)
- context = {"client_response": self, "message": "Unclosed response"}
- if self._source_traceback:
- context["source_traceback"] = self._source_traceback
- self._loop.call_exception_handler(context)
+ self.headers[hdrs.AUTHORIZATION] = auth.encode()
- def __repr__(self) -> str:
- out = io.StringIO()
- ascii_encodable_url = str(self.url)
- if self.reason:
- ascii_encodable_reason = self.reason.encode(
- "ascii", "backslashreplace"
- ).decode("ascii")
- else:
- ascii_encodable_reason = "None"
- print(
- "".format(
- ascii_encodable_url, self.status, ascii_encodable_reason
- ),
- file=out,
- )
- print(self.headers, file=out)
- return out.getvalue()
+ def update_body_from_data(self, body: Any, _stacklevel: int = 3) -> None:
+ """Update request body from data."""
+ if self._body is not None:
+ _warn_if_unclosed_payload(self._body, stacklevel=_stacklevel)
- @property
- def connection(self) -> Optional["Connection"]:
- return self._connection
+ if body is None:
+ self._body = None
+ return
- @reify
- def history(self) -> Tuple["ClientResponse", ...]:
- """A sequence of of responses, if redirects occurred."""
- return self._history
+ # FormData
+ maybe_payload = body() if isinstance(body, FormData) else body
- @reify
- def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":
- links_str = ", ".join(self.headers.getall("link", []))
+ try:
+ body_payload = payload.PAYLOAD_REGISTRY.get(maybe_payload, disposition=None)
+ except payload.LookupError:
+ body_payload = FormData(maybe_payload)() # type: ignore[arg-type]
- if not links_str:
- return MultiDictProxy(MultiDict())
+ self._body = body_payload
+ # enable chunked encoding if needed
+ if not self.chunked and hdrs.CONTENT_LENGTH not in self.headers:
+ if (size := body_payload.size) is not None:
+ self.headers[hdrs.CONTENT_LENGTH] = str(size)
+ else:
+ self.chunked = True
+
+ # copy payload headers
+ assert body_payload.headers
+ headers = self.headers
+ skip_headers = self._skip_auto_headers
+ for key, value in body_payload.headers.items():
+ if key in headers or (skip_headers is not None and key in skip_headers):
+ continue
+ headers[key] = value
+
+ def _update_body(self, body: Any) -> None:
+ """Update request body after its already been set."""
+ # Remove existing Content-Length header since body is changing
+ if hdrs.CONTENT_LENGTH in self.headers:
+ del self.headers[hdrs.CONTENT_LENGTH]
- links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict()
+ # Remove existing Transfer-Encoding header to avoid conflicts
+ if self.chunked and hdrs.TRANSFER_ENCODING in self.headers:
+ del self.headers[hdrs.TRANSFER_ENCODING]
- for val in re.split(r",(?=\s*<)", links_str):
- match = re.match(r"\s*<(.*)>(.*)", val)
- if match is None: # pragma: no cover
- # the check exists to suppress mypy error
- continue
- url, params_str = match.groups()
- params = params_str.split(";")[1:]
+ # Now update the body using the existing method
+ # Called from _update_body, add 1 to stacklevel from caller
+ self.update_body_from_data(body, _stacklevel=4)
- link: MultiDict[Union[str, URL]] = MultiDict()
+ # Update transfer encoding headers if needed (same logic as __init__)
+ if body is not None or self.method not in self.GET_METHODS:
+ self.update_transfer_encoding()
- for param in params:
- match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
- if match is None: # pragma: no cover
- # the check exists to suppress mypy error
- continue
- key, _, value, _ = match.groups()
+ async def update_body(self, body: Any) -> None:
+ """
+ Update request body and close previous payload if needed.
- link.add(key, value)
+ This method safely updates the request body by first closing any existing
+ payload to prevent resource leaks, then setting the new body.
- key = link.get("rel", url)
+ IMPORTANT: Always use this method instead of setting request.body directly.
+ Direct assignment to request.body will leak resources if the previous body
+ contains file handles, streams, or other resources that need cleanup.
- link.add("url", self.url.join(URL(url)))
+ Args:
+ body: The new body content. Can be:
+ - bytes/bytearray: Raw binary data
+ - str: Text data (will be encoded using charset from Content-Type)
+ - FormData: Form data that will be encoded as multipart/form-data
+ - Payload: A pre-configured payload object
+ - AsyncIterable: An async iterable of bytes chunks
+ - File-like object: Will be read and sent as binary data
+ - None: Clears the body
- links.add(str(key), MultiDictProxy(link))
+ Usage:
+ # CORRECT: Use update_body
+ await request.update_body(b"new request data")
- return MultiDictProxy(links)
+ # WRONG: Don't set body directly
+ # request.body = b"new request data" # This will leak resources!
- async def start(self, connection: "Connection") -> "ClientResponse":
- """Start response processing."""
- self._closed = False
- self._protocol = connection.protocol
- self._connection = connection
+ # Update with form data
+ form_data = FormData()
+ form_data.add_field('field', 'value')
+ await request.update_body(form_data)
- with self._timer:
- while True:
- # read response
- try:
- protocol = self._protocol
- message, payload = await protocol.read() # type: ignore[union-attr]
- except http.HttpProcessingError as exc:
- raise ClientResponseError(
- self.request_info,
- self.history,
- status=exc.code,
- message=exc.message,
- headers=exc.headers,
- ) from exc
+ # Clear body
+ await request.update_body(None)
- if message.code < 100 or message.code > 199 or message.code == 101:
- break
+ Note:
+ This method is async because it may need to close file handles or
+ other resources associated with the previous payload. Always await
+ this method to ensure proper cleanup.
- if self._continue is not None:
- set_result(self._continue, True)
- self._continue = None
+ Warning:
+ Setting request.body directly is highly discouraged and can lead to:
+ - Resource leaks (unclosed file handles, streams)
+ - Memory leaks (unreleased buffers)
+ - Unexpected behavior with streaming payloads
- # payload eof handler
- payload.on_eof(self._response_eof)
+ It is not recommended to change the payload type in middleware. If the
+ body was already set (e.g., as bytes), it's best to keep the same type
+ rather than converting it (e.g., to str) as this may result in unexpected
+ behavior.
- # response status
- self.version = message.version
- self.status = message.code
- self.reason = message.reason
+ See Also:
+ - update_body_from_data: Synchronous body update without cleanup
+ - body property: Direct body access (STRONGLY DISCOURAGED)
- # headers
- self._headers = message.headers # type is CIMultiDictProxy
- self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes]
+ """
+ # Close existing payload if it exists and needs closing
+ if self._body is not None:
+ await self._body.close()
+ self._update_body(body)
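A hypothetical client-middleware sketch using ``update_body`` as the docstring recommends; the signing scheme and endpoint are illustrative, not part of this patch::

    import asyncio
    import aiohttp

    async def resign_body(request, handler):
        # update_body() closes the previous payload first, so file
        # handles and streams are not leaked, unlike assigning
        # request.body directly.
        if request.method == "POST":
            await request.update_body(b"signed:payload")
        return await handler(request)

    async def main():
        async with aiohttp.ClientSession(middlewares=(resign_body,)) as session:
            async with session.post("https://example.com/upload", data=b"payload") as resp:
                print(resp.status)

    asyncio.run(main())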
- # payload
- self.content = payload
+ def update_expect_continue(self, expect: bool = False) -> None:
+ if expect:
+ self.headers[hdrs.EXPECT] = "100-continue"
+ elif (
+ hdrs.EXPECT in self.headers
+ and self.headers[hdrs.EXPECT].lower() == "100-continue"
+ ):
+ expect = True
- # cookies
- if cookie_hdrs := self.headers.getall(hdrs.SET_COOKIE, ()):
- cookies = SimpleCookie()
- for hdr in cookie_hdrs:
- try:
- cookies.load(hdr)
- except CookieError as exc:
- client_logger.warning("Can not load response cookies: %s", exc)
- self._cookies = cookies
- return self
+ if expect:
+ self._continue = self.loop.create_future()
- def _response_eof(self) -> None:
- if self._closed:
+ def update_proxy(
+ self,
+ proxy: Optional[URL],
+ proxy_auth: Optional[BasicAuth],
+ proxy_headers: Optional[LooseHeaders],
+ ) -> None:
+ self.proxy = proxy
+ if proxy is None:
+ self.proxy_auth = None
+ self.proxy_headers = None
return
- # protocol could be None because connection could be detached
- protocol = self._connection and self._connection.protocol
- if protocol is not None and protocol.upgraded:
- return
+ if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
+ raise ValueError("proxy_auth must be None or BasicAuth() tuple")
+ self.proxy_auth = proxy_auth
- self._closed = True
- self._cleanup_writer()
- self._release_connection()
+ if proxy_headers is not None and not isinstance(
+ proxy_headers, (MultiDict, MultiDictProxy)
+ ):
+ proxy_headers = CIMultiDict(proxy_headers)
+ self.proxy_headers = proxy_headers
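These proxy fields are populated from the per-request arguments; a short sketch of the calling side (proxy address, credentials, and header are illustrative)::

    import asyncio
    import aiohttp

    async def main():
        async with aiohttp.ClientSession() as session:
            async with session.get(
                "http://example.com",
                proxy="http://127.0.0.1:8888",
                proxy_auth=aiohttp.BasicAuth("user", "pass"),
                proxy_headers={"X-Proxy-Trace": "1"},
            ) as resp:
                print(resp.status)

    asyncio.run(main())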
- @property
- def closed(self) -> bool:
- return self._closed
+ async def write_bytes(
+ self,
+ writer: AbstractStreamWriter,
+ conn: "Connection",
+ content_length: Optional[int],
+ ) -> None:
+ """
+ Write the request body to the connection stream.
- def close(self) -> None:
- if not self._released:
- self._notify_content()
+ This method handles writing different types of request bodies:
+ 1. Payload objects (using their specialized write_with_length method)
+ 2. Bytes/bytearray objects
+ 3. Iterable body content
- self._closed = True
- if self._loop is None or self._loop.is_closed():
- return
+ Args:
+ writer: The stream writer to write the body to
+ conn: The connection being used for this request
+ content_length: Optional maximum number of bytes to write from the body
+ (None means write the entire body)
- self._cleanup_writer()
- if self._connection is not None:
- self._connection.close()
- self._connection = None
+ The method properly handles:
+ - Waiting for 100-Continue responses if required
+ - Content length constraints for chunked encoding
+ - Error handling for network issues, cancellation, and other exceptions
+ - Signaling EOF and timeout management
- def release(self) -> Any:
- if not self._released:
- self._notify_content()
+ Raises:
+ ClientOSError: When there's an OS-level error writing the body
+ ClientConnectionError: When there's a general connection error
+ asyncio.CancelledError: When the operation is cancelled
- self._closed = True
+ """
+ # 100 response
+ if self._continue is not None:
+ # Force headers to be sent before waiting for 100-continue
+ writer.send_headers()
+ await writer.drain()
+ await self._continue
- self._cleanup_writer()
- self._release_connection()
- return noop()
+ protocol = conn.protocol
+ assert protocol is not None
+ try:
+ # This is a rare case: self._body can be set to None
+ # while the task is starting or while we wait above for
+ # the 100-continue response. The more likely case is an
+ # empty payload with 100-continue still expected.
+ if self._body is not None:
+ await self._body.write_with_length(writer, content_length)
+ except OSError as underlying_exc:
+ reraised_exc = underlying_exc
+
+ # Distinguish between timeout and other OS errors for better error reporting
+ exc_is_not_timeout = underlying_exc.errno is not None or not isinstance(
+ underlying_exc, asyncio.TimeoutError
+ )
+ if exc_is_not_timeout:
+ reraised_exc = ClientOSError(
+ underlying_exc.errno,
+ f"Can not write request body for {self.url !s}",
+ )
+
+ set_exception(protocol, reraised_exc, underlying_exc)
+ except asyncio.CancelledError:
+ # Body hasn't been fully sent, so connection can't be reused
+ conn.close()
+ raise
+ except Exception as underlying_exc:
+ set_exception(
+ protocol,
+ ClientConnectionError(
+ "Failed to send bytes into the underlying connection "
+ f"{conn !s}: {underlying_exc!r}",
+ ),
+ underlying_exc,
+ )
+ else:
+ # Successfully wrote the body, signal EOF and start response timeout
+ await writer.write_eof()
+ protocol.start_timeout()
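The 100-continue wait at the top of ``write_bytes`` only fires when the request was created with ``expect100=True``; a minimal client-side sketch (URL and payload are illustrative)::

    import asyncio
    import aiohttp

    async def main():
        async with aiohttp.ClientSession() as session:
            # Headers are flushed first; the body is held back until the
            # server answers with "100 Continue".
            async with session.post(
                "https://example.com/big-upload",
                data=b"x" * 1_000_000,
                expect100=True,
            ) as resp:
                print(resp.status)

    asyncio.run(main())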
- @property
- def ok(self) -> bool:
- """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not.
+ async def send(self, conn: "Connection") -> "ClientResponse":
+ # Specify request target:
+ # - CONNECT request must send authority form URI
+ # - not CONNECT proxy must send absolute form URI
+ # - most common is origin form URI
+ if self.method == hdrs.METH_CONNECT:
+ connect_host = self.url.host_subcomponent
+ assert connect_host is not None
+ path = f"{connect_host}:{self.url.port}"
+ elif self.proxy and not self.is_ssl():
+ path = str(self.url)
+ else:
+ path = self.url.raw_path_qs
- This is **not** a check for ``200 OK`` but a check that the response
- status is under 400.
- """
- return 400 > self.status
+ protocol = conn.protocol
+ assert protocol is not None
+ writer = StreamWriter(
+ protocol,
+ self.loop,
+ on_chunk_sent=(
+ functools.partial(self._on_chunk_request_sent, self.method, self.url)
+ if self._traces
+ else None
+ ),
+ on_headers_sent=(
+ functools.partial(self._on_headers_request_sent, self.method, self.url)
+ if self._traces
+ else None
+ ),
+ )
- def raise_for_status(self) -> None:
- if not self.ok:
- # reason should always be not None for a started response
- assert self.reason is not None
+ if self.compress:
+ writer.enable_compression(self.compress) # type: ignore[arg-type]
- # If we're in a context we can rely on __aexit__() to release as the
- # exception propagates.
- if not self._in_context:
- self.release()
+ if self.chunked is not None:
+ writer.enable_chunking()
- raise ClientResponseError(
- self.request_info,
- self.history,
- status=self.status,
- message=self.reason,
- headers=self.headers,
+ # set default content-type
+ if (
+ self.method in self.POST_METHODS
+ and (
+ self._skip_auto_headers is None
+ or hdrs.CONTENT_TYPE not in self._skip_auto_headers
)
+ and hdrs.CONTENT_TYPE not in self.headers
+ ):
+ self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream"
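The default Content-Type set above can be suppressed with ``skip_auto_headers``, which is what feeds the ``self._skip_auto_headers`` check; a short sketch (URL is illustrative)::

    import asyncio
    import aiohttp

    async def main():
        async with aiohttp.ClientSession() as session:
            # Without skip_auto_headers this POST would be tagged
            # "Content-Type: application/octet-stream" automatically.
            async with session.post(
                "https://example.com/raw",
                data=b"\x00\x01",
                skip_auto_headers=("Content-Type",),
            ) as resp:
                print(resp.status)

    asyncio.run(main())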
- def _release_connection(self) -> None:
- if self._connection is not None:
- if self.__writer is None:
- self._connection.release()
- self._connection = None
- else:
- self.__writer.add_done_callback(lambda f: self._release_connection())
+ v = self.version
+ if hdrs.CONNECTION not in self.headers:
+ if conn._connector.force_close:
+ if v == HttpVersion11:
+ self.headers[hdrs.CONNECTION] = "close"
+ elif v == HttpVersion10:
+ self.headers[hdrs.CONNECTION] = "keep-alive"
- async def _wait_released(self) -> None:
- if self.__writer is not None:
- try:
- await self.__writer
- except asyncio.CancelledError:
- if (
- sys.version_info >= (3, 11)
- and (task := asyncio.current_task())
- and task.cancelling()
- ):
- raise
- self._release_connection()
+ # status + headers
+ status_line = f"{self.method} {path} HTTP/{v.major}.{v.minor}"
- def _cleanup_writer(self) -> None:
- if self.__writer is not None:
- self.__writer.cancel()
- self._session = None
+ # Buffer headers for potential coalescing with body
+ await writer.write_headers(status_line, self.headers)
- def _notify_content(self) -> None:
- content = self.content
- if content and content.exception() is None:
- set_exception(content, _CONNECTION_CLOSED_EXCEPTION)
- self._released = True
+ task: Optional["asyncio.Task[None]"]
+ if self._body or self._continue is not None or protocol.writing_paused:
+ coro = self.write_bytes(writer, conn, self._get_content_length())
+ if sys.version_info >= (3, 12):
+ # Optimization for Python 3.12, try to write
+ # bytes immediately to avoid having to schedule
+ # the task on the event loop.
+ task = asyncio.Task(coro, loop=self.loop, eager_start=True)
+ else:
+ task = self.loop.create_task(coro)
+ if task.done():
+ task = None
+ else:
+ self._writer = task
+ else:
+ # We have nothing to write because
+ # - there is no body
+ # - the protocol does not have writing paused
+ # - we are not waiting for a 100-continue response
+ protocol.start_timeout()
+ writer.set_eof()
+ task = None
+ response_class = self.response_class
+ assert response_class is not None
+ self.response = response_class(
+ self.method,
+ self.original_url,
+ writer=task,
+ continue100=self._continue,
+ timer=self._timer,
+ request_info=self.request_info,
+ traces=self._traces,
+ loop=self.loop,
+ session=self._session,
+ )
+ return self.response
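The eager-start branch in ``send`` leans on Python 3.12's ``eager_start`` flag: if the coroutine finishes without suspending, the task is already done and never has to be scheduled on the loop. A standalone sketch of that behavior (the coroutine is illustrative)::

    import asyncio
    import sys

    async def write_now():
        return "done"  # completes without awaiting anything

    async def main():
        if sys.version_info >= (3, 12):
            task = asyncio.Task(
                write_now(), loop=asyncio.get_running_loop(), eager_start=True
            )
            print(task.done())  # True: ran eagerly, no scheduling needed
        else:
            task = asyncio.get_running_loop().create_task(write_now())
            print(task.done())  # False: runs on a later loop iteration
        await task

    asyncio.run(main())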
- async def wait_for_close(self) -> None:
+ async def close(self) -> None:
if self.__writer is not None:
try:
await self.__writer
@@ -1414,107 +1499,20 @@ async def wait_for_close(self) -> None:
and task.cancelling()
):
raise
- self.release()
-
- async def read(self) -> bytes:
- """Read response payload."""
- if self._body is None:
- try:
- self._body = await self.content.read()
- for trace in self._traces:
- await trace.send_response_chunk_received(
- self.method, self.url, self._body
- )
- except BaseException:
- self.close()
- raise
- elif self._released: # Response explicitly released
- raise ClientConnectionError("Connection closed")
-
- protocol = self._connection and self._connection.protocol
- if protocol is None or not protocol.upgraded:
- await self._wait_released() # Underlying connection released
- return self._body
-
- def get_encoding(self) -> str:
- ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
- mimetype = helpers.parse_mimetype(ctype)
-
- encoding = mimetype.parameters.get("charset")
- if encoding:
- with contextlib.suppress(LookupError, ValueError):
- return codecs.lookup(encoding).name
-
- if mimetype.type == "application" and (
- mimetype.subtype == "json" or mimetype.subtype == "rdap"
- ):
- # RFC 7159 states that the default encoding is UTF-8.
- # RFC 7483 defines application/rdap+json
- return "utf-8"
-
- if self._body is None:
- raise RuntimeError(
- "Cannot compute fallback encoding of a not yet read body"
- )
-
- return self._resolve_charset(self, self._body)
-
- async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str:
- """Read response payload and decode."""
- if self._body is None:
- await self.read()
- if encoding is None:
- encoding = self.get_encoding()
-
- return self._body.decode(encoding, errors=errors) # type: ignore[union-attr]
-
- async def json(
- self,
- *,
- encoding: Optional[str] = None,
- loads: JSONDecoder = DEFAULT_JSON_DECODER,
- content_type: Optional[str] = "application/json",
- ) -> Any:
- """Read and decodes JSON response."""
- if self._body is None:
- await self.read()
-
- if content_type:
- ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
- if not _is_expected_content_type(ctype, content_type):
- raise ContentTypeError(
- self.request_info,
- self.history,
- status=self.status,
- message=(
- "Attempt to decode JSON with unexpected mimetype: %s" % ctype
- ),
- headers=self.headers,
- )
-
- stripped = self._body.strip() # type: ignore[union-attr]
- if not stripped:
- return None
-
- if encoding is None:
- encoding = self.get_encoding()
-
- return loads(stripped.decode(encoding))
+ def terminate(self) -> None:
+ if self.__writer is not None:
+ if not self.loop.is_closed():
+ self.__writer.cancel()
+ self.__writer.remove_done_callback(self.__reset_writer)
+ self.__writer = None
- async def __aenter__(self) -> "ClientResponse":
- self._in_context = True
- return self
+ async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None:
+ for trace in self._traces:
+ await trace.send_request_chunk_sent(method, url, chunk)
- async def __aexit__(
- self,
- exc_type: Optional[Type[BaseException]],
- exc_val: Optional[BaseException],
- exc_tb: Optional[TracebackType],
+ async def _on_headers_request_sent(
+ self, method: str, url: URL, headers: "CIMultiDict[str]"
) -> None:
- self._in_context = False
- # similar to _RequestContextManager, we do not need to check
- # for exceptions, response object can close connection
- # if state is broken
- self.release()
- await self.wait_for_close()
+ for trace in self._traces:
+ await trace.send_request_headers(method, url, headers)
From b192479238e63cdba013a5fa4d59b56944ec5cd7 Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Mon, 26 May 2025 09:47:16 -0500
Subject: [PATCH 11/13] Release 3.12.1rc0 (#11030)
---
CHANGES.rst | 20 ++++++++++++++++++++
aiohttp/__init__.py | 2 +-
2 files changed, 21 insertions(+), 1 deletion(-)
diff --git a/CHANGES.rst b/CHANGES.rst
index ddbebd82369..a2703bb0a8c 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -10,6 +10,26 @@
.. towncrier release notes start
+3.12.1rc0 (2025-05-26)
+======================
+
+Features
+--------
+
+- Added support for reusable request bodies to enable retries, redirects, and digest authentication -- by :user:`bdraco` and :user:`GLGDLY`.
+
+ Most payloads can now be safely reused multiple times, fixing long-standing issues where POST requests with form data or file uploads would fail on redirects with errors like "Form data has been processed already" or "I/O operation on closed file". This also enables digest authentication to work with request bodies and allows retry mechanisms to resend requests without consuming the payload. Note that payloads derived from async iterables may still not be reusable in some cases.
+
+
+ *Related issues and pull requests on GitHub:*
+ :issue:`5530`, :issue:`5577`, :issue:`9201`, :issue:`11017`.
+
+
+
+
+----
+
+
3.12.0 (2025-05-24)
===================
diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py
index 4bc6a3a2b22..e61fb80e8c8 100644
--- a/aiohttp/__init__.py
+++ b/aiohttp/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "3.12.0.dev0"
+__version__ = "3.12.1rc0"
from typing import TYPE_CHECKING, Tuple
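The feature shipped in this release is the reusable-bodies work from patch 02; a sketch of the scenario it fixes, where a redirected POST must send its form payload a second time (server URL is illustrative)::

    import asyncio
    import aiohttp

    async def main():
        form = aiohttp.FormData()
        form.add_field("name", "value")
        async with aiohttp.ClientSession() as session:
            # Before 3.12.1, re-sending this body on a 307/308 redirect
            # could fail with "Form data has been processed already";
            # the payload is now reusable across the redirect.
            async with session.post("https://example.com/submit", data=form) as resp:
                print(resp.status, resp.url)

    asyncio.run(main())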
From 152e4160fde8985e770398da5271396d7a831198 Mon Sep 17 00:00:00 2001
From: "J. Nick Koston"
Date: Mon, 26 May 2025 10:46:02 -0500
Subject: [PATCH 12/13] Release 3.12.1 (#11031)
---
CHANGES.rst | 1412 +------------------------------------
CHANGES/11017.feature.rst | 3 -
CHANGES/5530.feature.rst | 1 -
CHANGES/5577.feature.rst | 1 -
CHANGES/9201.feature.rst | 1 -
aiohttp/__init__.py | 2 +-
6 files changed, 3 insertions(+), 1417 deletions(-)
delete mode 100644 CHANGES/11017.feature.rst
delete mode 120000 CHANGES/5530.feature.rst
delete mode 120000 CHANGES/5577.feature.rst
delete mode 120000 CHANGES/9201.feature.rst
diff --git a/CHANGES.rst b/CHANGES.rst
index a2703bb0a8c..82e7ad49de8 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -10,8 +10,8 @@
.. towncrier release notes start
-3.12.1rc0 (2025-05-26)
-======================
+3.12.1 (2025-05-26)
+===================
Features
--------
@@ -303,1414 +303,6 @@ Miscellaneous internal changes
-----
-
-
-3.12.0rc1 (2025-05-24)
-======================
-
-Bug fixes
----------
-
-- Fixed :py:attr:`~aiohttp.web.WebSocketResponse.prepared` property to correctly reflect the prepared state, especially during timeout scenarios -- by :user:`bdraco`
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`6009`, :issue:`10988`.
-
-
-
-- Response is now always True, instead of using MutableMapping behaviour (False when map is empty)
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10119`.
-
-
-
-- Fixed connection reuse for file-like data payloads by ensuring buffer
- truncation respects content-length boundaries and preventing premature
- connection closure race -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10325`, :issue:`10915`, :issue:`10941`, :issue:`10943`.
-
-
-
-- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10851`.
-
-
-
-- Fixed :py:class:`~aiohttp.resolver.AsyncResolver` not using the ``loop`` argument in versions 3.x where it should still be supported -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10951`.
-
-
-
-
-Features
---------
-
-- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware)
- that implements RFC 7616. The middleware supports all standard hash algorithms
- (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and
- 'auth-int' quality of protection options, and automatically manages the
- authentication flow by intercepting 401 responses and retrying with proper
- credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`2213`, :issue:`10725`.
-
-
-
-- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`.
-
- This change allows users to add middleware to the client session and requests, enabling features like
- authentication, logging, and request/response modification without modifying the core
- request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``,
- allowing middleware to access the session for making additional requests.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9732`, :issue:`10902`, :issue:`10945`, :issue:`10952`, :issue:`10959`, :issue:`10968`.
-
-
-
-- Allow user setting zlib compression backend -- by :user:`TimMenninger`
-
- This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the
- zlib compression module of their choice. Default behavior continues to use
- the builtin ``zlib`` library.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9798`.
-
-
-
-- Added support for overriding the base URL with an absolute one in client sessions
- -- by :user:`vivodi`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10074`.
-
-
-
-- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10120`.
-
-
-
-- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10433`.
-
-
-
-- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options
- -- by :user:`TimMenninger`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10474`, :issue:`10520`, :issue:`10961`, :issue:`10962`.
-
-
-
-- Started building armv7l manylinux wheels -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10797`.
-
-
-
-- Implemented shared DNS resolver management to fix excessive resolver object creation
- when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures
- only one ``DNSResolver`` object is created for default configurations, significantly
- reducing resource usage and improving performance for applications using multiple
- client sessions simultaneously -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10847`, :issue:`10923`, :issue:`10946`.
-
-
-
-- Upgraded to LLHTTP 9.3.0 -- by :user:`Dreamsorcerer`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10972`.
-
-
-
-- Optimized small HTTP requests/responses by coalescing headers and body into a single TCP packet -- by :user:`bdraco`.
-
- This change enhances network efficiency by reducing the number of packets sent for small HTTP payloads, improving latency and reducing overhead. Most importantly, this fixes compatibility with memory-constrained IoT devices that can only perform a single read operation and expect HTTP requests in one packet. The optimization uses zero-copy ``writelines`` when coalescing data and works with both regular and chunked transfer encoding.
-
- When ``aiohttp`` uses client middleware to communicate with an ``aiohttp`` server, connection reuse is more likely to occur since complete responses arrive in a single packet for small payloads.
-
- This aligns ``aiohttp`` with other popular HTTP clients that already coalesce small requests.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10991`.
-
-
-
-
-Improved documentation
-----------------------
-
-- Improved documentation for middleware by adding warnings and examples about
- request body stream consumption. The documentation now clearly explains that
- request body streams can only be read once and provides best practices for
- sharing parsed request data between middleware and handlers -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`2914`.
-
-
-
-
-Packaging updates and notes for downstreams
--------------------------------------------
-
-- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10662`.
-
-
-
-- Added support for building against system ``llhttp`` library -- by :user:`mgorny`.
-
- This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that
- can be used to build aiohttp against the system install of the ``llhttp`` library rather
- than the vendored one.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10759`.
-
-
-
-- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`.
-
- As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10823`.
-
-
-
-- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco`
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10877`.
-
-
-
-
-Contributor-facing changes
---------------------------
-
-- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9705`, :issue:`10761`.
-
-
-
-- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and
- make it compatible with Python 3.14.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10851`.
-
-
-
-- Added Winloop to test suite to support in the future -- by :user:`Vizonex`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10922`.
-
-
-
-
-Miscellaneous internal changes
-------------------------------
-
-- Added support for the ``partitioned`` attribute in the ``set_cookie`` method.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9870`.
-
-
-
-- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10146`.
-
-
-
-
-----
-
-
-3.12.0rc0 (2025-05-23)
-======================
-
-Bug fixes
----------
-
-- Fixed :py:attr:`~aiohttp.web.WebSocketResponse.prepared` property to correctly reflect the prepared state, especially during timeout scenarios -- by :user:`bdraco`
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`6009`, :issue:`10988`.
-
-
-
-- Response is now always True, instead of using MutableMapping behaviour (False when map is empty)
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10119`.
-
-
-
-- Fixed connection reuse for file-like data payloads by ensuring buffer
- truncation respects content-length boundaries and preventing premature
- connection closure race -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10325`, :issue:`10915`, :issue:`10941`, :issue:`10943`.
-
-
-
-- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10851`.
-
-
-
-- Fixed :py:class:`~aiohttp.resolver.AsyncResolver` not using the ``loop`` argument in versions 3.x where it should still be supported -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10951`.
-
-
-
-
-Features
---------
-
-- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware)
- that implements RFC 7616. The middleware supports all standard hash algorithms
- (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and
- 'auth-int' quality of protection options, and automatically manages the
- authentication flow by intercepting 401 responses and retrying with proper
- credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`2213`, :issue:`10725`.
-
-
-
-- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`.
-
- This change allows users to add middleware to the client session and requests, enabling features like
- authentication, logging, and request/response modification without modifying the core
- request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``,
- allowing middleware to access the session for making additional requests.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9732`, :issue:`10902`, :issue:`10945`, :issue:`10952`, :issue:`10959`, :issue:`10968`.
-
-
-
-- Allow user setting zlib compression backend -- by :user:`TimMenninger`
-
- This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the
- zlib compression module of their choice. Default behavior continues to use
- the builtin ``zlib`` library.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9798`.
-
-
-
-- Added support for overriding the base URL with an absolute one in client sessions
- -- by :user:`vivodi`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10074`.
-
-
-
-- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10120`.
-
-
-
-- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10433`.
-
-
-
-- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options
- -- by :user:`TimMenninger`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10474`, :issue:`10520`, :issue:`10961`, :issue:`10962`.
-
-
-
-- Started building armv7l manylinux wheels -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10797`.
-
-
-
-- Implemented shared DNS resolver management to fix excessive resolver object creation
- when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures
- only one ``DNSResolver`` object is created for default configurations, significantly
- reducing resource usage and improving performance for applications using multiple
- client sessions simultaneously -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10847`, :issue:`10923`, :issue:`10946`.
-
-
-
-- Upgraded to LLHTTP 9.3.0 -- by :user:`Dreamsorcerer`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10972`.
-
-
-
-
-Improved documentation
-----------------------
-
-- Improved documentation for middleware by adding warnings and examples about
- request body stream consumption. The documentation now clearly explains that
- request body streams can only be read once and provides best practices for
- sharing parsed request data between middleware and handlers -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`2914`.
-
-
-
-
-Packaging updates and notes for downstreams
--------------------------------------------
-
-- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10662`.
-
-
-
-- Added support for building against system ``llhttp`` library -- by :user:`mgorny`.
-
- This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that
- can be used to build aiohttp against the system install of the ``llhttp`` library rather
- than the vendored one.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10759`.
-
-
-
-- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`.
-
- As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10823`.
-
-
-
-- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco`
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10877`.
-
-
-
-
-Contributor-facing changes
---------------------------
-
-- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9705`, :issue:`10761`.
-
-
-
-- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and
- make it compatible with Python 3.14.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10851`.
-
-
-
-- Added Winloop to test suite to support in the future -- by :user:`Vizonex`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10922`.
-
-
-
-
-Miscellaneous internal changes
-------------------------------
-
-- Added support for the ``partitioned`` attribute in the ``set_cookie`` method.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9870`.
-
-
-
-- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10146`.
-
-
-
-
-----
-
-
-3.12.0b3 (2025-05-22)
-=====================
-
-Bug fixes
----------
-
-- Response is now always True, instead of using MutableMapping behaviour (False when map is empty)
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10119`.
-
-
-
-- Fixed connection reuse for file-like data payloads by ensuring buffer
- truncation respects content-length boundaries and preventing premature
- connection closure race -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10325`, :issue:`10915`, :issue:`10941`, :issue:`10943`.
-
-
-
-- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10851`.
-
-
-
-- Fixed :py:class:`~aiohttp.resolver.AsyncResolver` not using the ``loop`` argument in versions 3.x where it should still be supported -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10951`.
-
-
-
-
-Features
---------
-
-- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware)
- that implements RFC 7616. The middleware supports all standard hash algorithms
- (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and
- 'auth-int' quality of protection options, and automatically manages the
- authentication flow by intercepting 401 responses and retrying with proper
- credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`2213`, :issue:`10725`.
-
-
-
-- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`.
-
- This change allows users to add middleware to the client session and requests, enabling features like
- authentication, logging, and request/response modification without modifying the core
- request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``,
- allowing middleware to access the session for making additional requests.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9732`, :issue:`10902`, :issue:`10952`.
-
-
-
-- Allow user setting zlib compression backend -- by :user:`TimMenninger`
-
- This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the
- zlib compression module of their choice. Default behavior continues to use
- the builtin ``zlib`` library.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9798`.
-
-
-
-- Added support for overriding the base URL with an absolute one in client sessions
- -- by :user:`vivodi`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10074`.
-
-
-
-- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10120`.
-
-
-
-- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10433`.
-
-
-
-- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options
- -- by :user:`TimMenninger`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10474`, :issue:`10520`.
-
-
-
-- Started building armv7l manylinux wheels -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10797`.
-
-
-
-- Implemented shared DNS resolver management to fix excessive resolver object creation
- when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures
- only one ``DNSResolver`` object is created for default configurations, significantly
- reducing resource usage and improving performance for applications using multiple
- client sessions simultaneously -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10847`, :issue:`10923`, :issue:`10946`.
-
-
-
-
-Packaging updates and notes for downstreams
--------------------------------------------
-
-- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10662`.
-
-
-
-- Added support for building against system ``llhttp`` library -- by :user:`mgorny`.
-
- This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that
- can be used to build aiohttp against the system install of the ``llhttp`` library rather
- than the vendored one.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10759`.
-
-
-
-- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`.
-
- As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10823`.
-
-
-
-- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco`
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10877`.
-
-
-
-
-Contributor-facing changes
---------------------------
-
-- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9705`, :issue:`10761`.
-
-
-
-- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and
- make it compatible with Python 3.14.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10851`.
-
-
-
-- Added Winloop to test suite to support in the future -- by :user:`Vizonex`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10922`.
-
-
-
-
-Miscellaneous internal changes
-------------------------------
-
-- Added support for the ``partitioned`` attribute in the ``set_cookie`` method.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9870`.
-
-
-
-- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10146`.
-
-
-
-
-----
-
-
-3.12.0b2 (2025-05-22)
-=====================
-
-Bug fixes
----------
-
-- Response is now always True, instead of using MutableMapping behaviour (False when map is empty)
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10119`.
-
-
-
-- Fixed connection reuse for file-like data payloads by ensuring buffer
- truncation respects content-length boundaries and preventing premature
- connection closure race -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10325`, :issue:`10915`, :issue:`10941`, :issue:`10943`.
-
-
-
-- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10851`.
-
-
-
-
-Features
---------
-
-- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware)
- that implements RFC 7616. The middleware supports all standard hash algorithms
- (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and
- 'auth-int' quality of protection options, and automatically manages the
- authentication flow by intercepting 401 responses and retrying with proper
- credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`2213`, :issue:`10725`.
-
-
-
-- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`.
-
- This change allows users to add middleware to the client session and requests, enabling features like
- authentication, logging, and request/response modification without modifying the core
- request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``,
- allowing middleware to access the session for making additional requests.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9732`, :issue:`10902`.
-
-
-
-- Allow user setting zlib compression backend -- by :user:`TimMenninger`
-
- This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the
- zlib compression module of their choice. Default behavior continues to use
- the builtin ``zlib`` library.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9798`.
-
-
-
-- Added support for overriding the base URL with an absolute one in client sessions
- -- by :user:`vivodi`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10074`.
-
-
-
-- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10120`.
-
-
-
-- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10433`.
-
-
-
-- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options
- -- by :user:`TimMenninger`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10474`, :issue:`10520`.
-
-
-
-- Started building armv7l manylinux wheels -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10797`.
-
-
-
-- Implemented shared DNS resolver management to fix excessive resolver object creation
- when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures
- only one ``DNSResolver`` object is created for default configurations, significantly
- reducing resource usage and improving performance for applications using multiple
- client sessions simultaneously -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10847`, :issue:`10923`, :issue:`10946`.
-
-
-
-
-Packaging updates and notes for downstreams
--------------------------------------------
-
-- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10662`.
-
-
-
-- Added support for building against system ``llhttp`` library -- by :user:`mgorny`.
-
- This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that
- can be used to build aiohttp against the system install of the ``llhttp`` library rather
- than the vendored one.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10759`.
-
-
-
-- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`.
-
- As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10823`.
-
-
-
-- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco`
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10877`.
-
-
-
-
-Contributor-facing changes
---------------------------
-
-- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9705`, :issue:`10761`.
-
-
-
-- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and
- make it compatible with Python 3.14.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10851`.
-
-
-
-- Added Winloop to test suite to support in the future -- by :user:`Vizonex`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10922`.
-
-
-
-
-Miscellaneous internal changes
-------------------------------
-
-- Added support for the ``partitioned`` attribute in the ``set_cookie`` method.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9870`.
-
-
-
-- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10146`.
-
-
-
-
-----
-
-
-3.12.0b1 (2025-05-22)
-=====================
-
-Bug fixes
----------
-
-- Response is now always True, instead of using MutableMapping behaviour (False when map is empty)
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10119`.
-
-
-
-- Fixed connection reuse for file-like data payloads by ensuring buffer
- truncation respects content-length boundaries and preventing premature
- connection closure race -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10325`, :issue:`10915`.
-
-
-
-- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10851`.
-
-
-
-
-Features
---------
-
-- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware)
- that implements RFC 7616. The middleware supports all standard hash algorithms
- (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and
- 'auth-int' quality of protection options, and automatically manages the
- authentication flow by intercepting 401 responses and retrying with proper
- credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`2213`, :issue:`10725`.
-
-
-
-- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`.
-
- This change allows users to add middleware to the client session and requests, enabling features like
- authentication, logging, and request/response modification without modifying the core
- request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``,
- allowing middleware to access the session for making additional requests.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9732`, :issue:`10902`.
-
-
-
-- Allow user setting zlib compression backend -- by :user:`TimMenninger`
-
- This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the
- zlib compression module of their choice. Default behavior continues to use
- the builtin ``zlib`` library.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9798`.
-
-
-
-- Added support for overriding the base URL with an absolute one in client sessions
- -- by :user:`vivodi`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10074`.
-
-
-
-- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10120`.
-
-
-
-- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10433`.
-
-
-
-- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options
- -- by :user:`TimMenninger`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10474`, :issue:`10520`.
-
-
-
-- Started building armv7l manylinux wheels -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10797`.
-
-
-
-- Implemented shared DNS resolver management to fix excessive resolver object creation
- when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures
- only one ``DNSResolver`` object is created for default configurations, significantly
- reducing resource usage and improving performance for applications using multiple
- client sessions simultaneously -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10847`, :issue:`10923`.
-
-
-
-
-Packaging updates and notes for downstreams
--------------------------------------------
-
-- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10662`.
-
-
-
-- Added support for building against system ``llhttp`` library -- by :user:`mgorny`.
-
- This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that
- can be used to build aiohttp against the system install of the ``llhttp`` library rather
- than the vendored one.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10759`.
-
-
-
-- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`.
-
- As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10823`.
-
-
-
-- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco`
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10877`.
-
-
-
-
-Contributor-facing changes
---------------------------
-
-- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9705`, :issue:`10761`.
-
-
-
-- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and
- make it compatible with Python 3.14.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10851`.
-
-
-
-- Added Winloop to test suite to support in the future -- by :user:`Vizonex`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10922`.
-
-
-
-
-Miscellaneous internal changes
-------------------------------
-
-- Added support for the ``partitioned`` attribute in the ``set_cookie`` method.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9870`.
-
-
-
-- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10146`.
-
-
-
-
-----
-
-
-3.12.0b0 (2025-05-20)
-=====================
-
-Bug fixes
----------
-
-- Response is now always True, instead of using MutableMapping behaviour (False when map is empty)
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10119`.
-
-
-
-- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10851`.
-
-
-
-
-Features
---------
-
-- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware)
- that implements RFC 7616. The middleware supports all standard hash algorithms
- (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and
- 'auth-int' quality of protection options, and automatically manages the
- authentication flow by intercepting 401 responses and retrying with proper
- credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`2213`, :issue:`10725`.
-
-
-
-- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`.
-
- This change allows users to add middleware to the client session and requests, enabling features like
- authentication, logging, and request/response modification without modifying the core
- request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``,
- allowing middleware to access the session for making additional requests.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9732`, :issue:`10902`.
-
-
-
-- Allow user setting zlib compression backend -- by :user:`TimMenninger`
-
- This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the
- zlib compression module of their choice. Default behavior continues to use
- the builtin ``zlib`` library.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9798`.
-
-
-
-- Added support for overriding the base URL with an absolute one in client sessions
- -- by :user:`vivodi`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10074`.
-
-
-
-- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10120`.
-
-
-
-- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10433`.
-
-
-
-- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options
- -- by :user:`TimMenninger`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10474`, :issue:`10520`.
-
-
-
-- Started building armv7l manylinux wheels -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10797`.
-
-
-
-- Implemented shared DNS resolver management to fix excessive resolver object creation
- when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures
- only one ``DNSResolver`` object is created for default configurations, significantly
- reducing resource usage and improving performance for applications using multiple
- client sessions simultaneously -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10847`.
-
-
-
-
-Packaging updates and notes for downstreams
--------------------------------------------
-
-- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10662`.
-
-
-
-- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`.
-
- As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10823`.
-
-
-
-- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco`
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10877`.
-
-
-
-
-Contributor-facing changes
---------------------------
-
-- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9705`, :issue:`10761`.
-
-
-
-- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and
- make it compatible with Python 3.14.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10851`.
-
-
-
-
-Miscellaneous internal changes
-------------------------------
-
-- Added support for the ``partitioned`` attribute in the ``set_cookie`` method.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`9870`.
-
-
-
-- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`.
-
-
- *Related issues and pull requests on GitHub:*
- :issue:`10146`.
-
-
-
-
----
diff --git a/CHANGES/11017.feature.rst b/CHANGES/11017.feature.rst
deleted file mode 100644
index 361c56e3fe8..00000000000
--- a/CHANGES/11017.feature.rst
+++ /dev/null
@@ -1,3 +0,0 @@
-Added support for reusable request bodies to enable retries, redirects, and digest authentication -- by :user:`bdraco` and :user:`GLGDLY`.
-
-Most payloads can now be safely reused multiple times, fixing long-standing issues where POST requests with form data or file uploads would fail on redirects with errors like "Form data has been processed already" or "I/O operation on closed file". This also enables digest authentication to work with request bodies and allows retry mechanisms to resend requests without consuming the payload. Note that payloads derived from async iterables may still not be reusable in some cases.
diff --git a/CHANGES/5530.feature.rst b/CHANGES/5530.feature.rst
deleted file mode 120000
index 63bf4429e55..00000000000
--- a/CHANGES/5530.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-11017.feature.rst
\ No newline at end of file
diff --git a/CHANGES/5577.feature.rst b/CHANGES/5577.feature.rst
deleted file mode 120000
index 63bf4429e55..00000000000
--- a/CHANGES/5577.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-11017.feature.rst
\ No newline at end of file
diff --git a/CHANGES/9201.feature.rst b/CHANGES/9201.feature.rst
deleted file mode 120000
index 63bf4429e55..00000000000
--- a/CHANGES/9201.feature.rst
+++ /dev/null
@@ -1 +0,0 @@
-11017.feature.rst
\ No newline at end of file
diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py
index e61fb80e8c8..5c88b0724ce 100644
--- a/aiohttp/__init__.py
+++ b/aiohttp/__init__.py
@@ -1,4 +1,4 @@
-__version__ = "3.12.1rc0"
+__version__ = "3.12.1"
from typing import TYPE_CHECKING, Tuple
From 1172dee0beb6b72d2d0c617b3091c7dc2d65c266 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]"
<66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Mon, 26 May 2025 16:54:04 +0000
Subject: [PATCH 13/13] [pre-commit.ci] pre-commit autoupdate (#11033)
Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
---
.pre-commit-config.yaml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index c51e384bbd2..a0820e12283 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -97,7 +97,7 @@ repos:
- id: detect-private-key
exclude: ^examples/
- repo: https://github.com/asottile/pyupgrade
- rev: 'v3.19.1'
+ rev: 'v3.20.0'
hooks:
- id: pyupgrade
args: ['--py37-plus']