diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c51e384bbd2..a0820e12283 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -97,7 +97,7 @@ repos: - id: detect-private-key exclude: ^examples/ - repo: https://github.com/asottile/pyupgrade - rev: 'v3.19.1' + rev: 'v3.20.0' hooks: - id: pyupgrade args: ['--py37-plus'] diff --git a/CHANGES.rst b/CHANGES.rst index c190d1aa69c..253864320eb 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,26 @@ .. towncrier release notes start +3.12.1 (2025-05-26) +=================== + +Features +-------- + +- Added support for reusable request bodies to enable retries, redirects, and digest authentication -- by :user:`bdraco` and :user:`GLGDLY`. + + Most payloads can now be safely reused multiple times, fixing long-standing issues where POST requests with form data or file uploads would fail on redirects with errors like "Form data has been processed already" or "I/O operation on closed file". This also enables digest authentication to work with request bodies and allows retry mechanisms to resend requests without consuming the payload. Note that payloads derived from async iterables may still not be reusable in some cases. + + + *Related issues and pull requests on GitHub:* + :issue:`5530`, :issue:`5577`, :issue:`9201`, :issue:`11017`. + + + + +---- + + 3.12.0 (2025-05-24) =================== @@ -283,1414 +303,6 @@ Miscellaneous internal changes ----- - - -3.12.0rc1 (2025-05-24) -====================== - -Bug fixes ---------- - -- Fixed :py:attr:`~aiohttp.web.WebSocketResponse.prepared` property to correctly reflect the prepared state, especially during timeout scenarios -- by :user:`bdraco` - - - *Related issues and pull requests on GitHub:* - :issue:`6009`, :issue:`10988`. - - - -- Response is now always True, instead of using MutableMapping behaviour (False when map is empty) - - - *Related issues and pull requests on GitHub:* - :issue:`10119`. 
- - - -- Fixed connection reuse for file-like data payloads by ensuring buffer - truncation respects content-length boundaries and preventing premature - connection closure race -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10325`, :issue:`10915`, :issue:`10941`, :issue:`10943`. - - - -- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. - - - -- Fixed :py:class:`~aiohttp.resolver.AsyncResolver` not using the ``loop`` argument in versions 3.x where it should still be supported -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10951`. - - - - -Features --------- - -- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware) - that implements RFC 7616. The middleware supports all standard hash algorithms - (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and - 'auth-int' quality of protection options, and automatically manages the - authentication flow by intercepting 401 responses and retrying with proper - credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`2213`, :issue:`10725`. - - - -- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`. - - This change allows users to add middleware to the client session and requests, enabling features like - authentication, logging, and request/response modification without modifying the core - request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``, - allowing middleware to access the session for making additional requests. - - - *Related issues and pull requests on GitHub:* - :issue:`9732`, :issue:`10902`, :issue:`10945`, :issue:`10952`, :issue:`10959`, :issue:`10968`. 
- - - -- Allow user setting zlib compression backend -- by :user:`TimMenninger` - - This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the - zlib compression module of their choice. Default behavior continues to use - the builtin ``zlib`` library. - - - *Related issues and pull requests on GitHub:* - :issue:`9798`. - - - -- Added support for overriding the base URL with an absolute one in client sessions - -- by :user:`vivodi`. - - - *Related issues and pull requests on GitHub:* - :issue:`10074`. - - - -- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`. - - - *Related issues and pull requests on GitHub:* - :issue:`10120`. - - - -- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`. - - - *Related issues and pull requests on GitHub:* - :issue:`10433`. - - - -- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options - -- by :user:`TimMenninger`. - - - *Related issues and pull requests on GitHub:* - :issue:`10474`, :issue:`10520`, :issue:`10961`, :issue:`10962`. - - - -- Started building armv7l manylinux wheels -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10797`. - - - -- Implemented shared DNS resolver management to fix excessive resolver object creation - when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures - only one ``DNSResolver`` object is created for default configurations, significantly - reducing resource usage and improving performance for applications using multiple - client sessions simultaneously -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10847`, :issue:`10923`, :issue:`10946`. - - - -- Upgraded to LLHTTP 9.3.0 -- by :user:`Dreamsorcerer`. - - - *Related issues and pull requests on GitHub:* - :issue:`10972`. 
- - - -- Optimized small HTTP requests/responses by coalescing headers and body into a single TCP packet -- by :user:`bdraco`. - - This change enhances network efficiency by reducing the number of packets sent for small HTTP payloads, improving latency and reducing overhead. Most importantly, this fixes compatibility with memory-constrained IoT devices that can only perform a single read operation and expect HTTP requests in one packet. The optimization uses zero-copy ``writelines`` when coalescing data and works with both regular and chunked transfer encoding. - - When ``aiohttp`` uses client middleware to communicate with an ``aiohttp`` server, connection reuse is more likely to occur since complete responses arrive in a single packet for small payloads. - - This aligns ``aiohttp`` with other popular HTTP clients that already coalesce small requests. - - - *Related issues and pull requests on GitHub:* - :issue:`10991`. - - - - -Improved documentation ----------------------- - -- Improved documentation for middleware by adding warnings and examples about - request body stream consumption. The documentation now clearly explains that - request body streams can only be read once and provides best practices for - sharing parsed request data between middleware and handlers -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`2914`. - - - - -Packaging updates and notes for downstreams -------------------------------------------- - -- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`. - - - *Related issues and pull requests on GitHub:* - :issue:`10662`. - - - -- Added support for building against system ``llhttp`` library -- by :user:`mgorny`. - - This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that - can be used to build aiohttp against the system install of the ``llhttp`` library rather - than the vendored one. 
- - - *Related issues and pull requests on GitHub:* - :issue:`10759`. - - - -- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`. - - As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later. - - - *Related issues and pull requests on GitHub:* - :issue:`10823`. - - - -- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco` - - - *Related issues and pull requests on GitHub:* - :issue:`10877`. - - - - -Contributor-facing changes --------------------------- - -- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. - - - *Related issues and pull requests on GitHub:* - :issue:`9705`, :issue:`10761`. - - - -- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and - make it compatible with Python 3.14. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. - - - -- Added Winloop to test suite to support in the future -- by :user:`Vizonex`. - - - *Related issues and pull requests on GitHub:* - :issue:`10922`. - - - - -Miscellaneous internal changes ------------------------------- - -- Added support for the ``partitioned`` attribute in the ``set_cookie`` method. - - - *Related issues and pull requests on GitHub:* - :issue:`9870`. - - - -- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10146`. - - - - ----- - - -3.12.0rc0 (2025-05-23) -====================== - -Bug fixes ---------- - -- Fixed :py:attr:`~aiohttp.web.WebSocketResponse.prepared` property to correctly reflect the prepared state, especially during timeout scenarios -- by :user:`bdraco` - - - *Related issues and pull requests on GitHub:* - :issue:`6009`, :issue:`10988`. 
- - - -- Response is now always True, instead of using MutableMapping behaviour (False when map is empty) - - - *Related issues and pull requests on GitHub:* - :issue:`10119`. - - - -- Fixed connection reuse for file-like data payloads by ensuring buffer - truncation respects content-length boundaries and preventing premature - connection closure race -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10325`, :issue:`10915`, :issue:`10941`, :issue:`10943`. - - - -- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. - - - -- Fixed :py:class:`~aiohttp.resolver.AsyncResolver` not using the ``loop`` argument in versions 3.x where it should still be supported -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10951`. - - - - -Features --------- - -- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware) - that implements RFC 7616. The middleware supports all standard hash algorithms - (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and - 'auth-int' quality of protection options, and automatically manages the - authentication flow by intercepting 401 responses and retrying with proper - credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`2213`, :issue:`10725`. - - - -- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`. - - This change allows users to add middleware to the client session and requests, enabling features like - authentication, logging, and request/response modification without modifying the core - request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``, - allowing middleware to access the session for making additional requests. 
- - - *Related issues and pull requests on GitHub:* - :issue:`9732`, :issue:`10902`, :issue:`10945`, :issue:`10952`, :issue:`10959`, :issue:`10968`. - - - -- Allow user setting zlib compression backend -- by :user:`TimMenninger` - - This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the - zlib compression module of their choice. Default behavior continues to use - the builtin ``zlib`` library. - - - *Related issues and pull requests on GitHub:* - :issue:`9798`. - - - -- Added support for overriding the base URL with an absolute one in client sessions - -- by :user:`vivodi`. - - - *Related issues and pull requests on GitHub:* - :issue:`10074`. - - - -- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`. - - - *Related issues and pull requests on GitHub:* - :issue:`10120`. - - - -- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`. - - - *Related issues and pull requests on GitHub:* - :issue:`10433`. - - - -- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options - -- by :user:`TimMenninger`. - - - *Related issues and pull requests on GitHub:* - :issue:`10474`, :issue:`10520`, :issue:`10961`, :issue:`10962`. - - - -- Started building armv7l manylinux wheels -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10797`. - - - -- Implemented shared DNS resolver management to fix excessive resolver object creation - when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures - only one ``DNSResolver`` object is created for default configurations, significantly - reducing resource usage and improving performance for applications using multiple - client sessions simultaneously -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10847`, :issue:`10923`, :issue:`10946`. - - - -- Upgraded to LLHTTP 9.3.0 -- by :user:`Dreamsorcerer`. 
- - - *Related issues and pull requests on GitHub:* - :issue:`10972`. - - - - -Improved documentation ----------------------- - -- Improved documentation for middleware by adding warnings and examples about - request body stream consumption. The documentation now clearly explains that - request body streams can only be read once and provides best practices for - sharing parsed request data between middleware and handlers -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`2914`. - - - - -Packaging updates and notes for downstreams -------------------------------------------- - -- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`. - - - *Related issues and pull requests on GitHub:* - :issue:`10662`. - - - -- Added support for building against system ``llhttp`` library -- by :user:`mgorny`. - - This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that - can be used to build aiohttp against the system install of the ``llhttp`` library rather - than the vendored one. - - - *Related issues and pull requests on GitHub:* - :issue:`10759`. - - - -- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`. - - As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later. - - - *Related issues and pull requests on GitHub:* - :issue:`10823`. - - - -- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco` - - - *Related issues and pull requests on GitHub:* - :issue:`10877`. - - - - -Contributor-facing changes --------------------------- - -- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. - - - *Related issues and pull requests on GitHub:* - :issue:`9705`, :issue:`10761`. - - - -- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and - make it compatible with Python 3.14. 
- - - *Related issues and pull requests on GitHub:* - :issue:`10851`. - - - -- Added Winloop to test suite to support in the future -- by :user:`Vizonex`. - - - *Related issues and pull requests on GitHub:* - :issue:`10922`. - - - - -Miscellaneous internal changes ------------------------------- - -- Added support for the ``partitioned`` attribute in the ``set_cookie`` method. - - - *Related issues and pull requests on GitHub:* - :issue:`9870`. - - - -- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10146`. - - - - ----- - - -3.12.0b3 (2025-05-22) -===================== - -Bug fixes ---------- - -- Response is now always True, instead of using MutableMapping behaviour (False when map is empty) - - - *Related issues and pull requests on GitHub:* - :issue:`10119`. - - - -- Fixed connection reuse for file-like data payloads by ensuring buffer - truncation respects content-length boundaries and preventing premature - connection closure race -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10325`, :issue:`10915`, :issue:`10941`, :issue:`10943`. - - - -- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. - - - -- Fixed :py:class:`~aiohttp.resolver.AsyncResolver` not using the ``loop`` argument in versions 3.x where it should still be supported -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10951`. - - - - -Features --------- - -- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware) - that implements RFC 7616. 
The middleware supports all standard hash algorithms - (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and - 'auth-int' quality of protection options, and automatically manages the - authentication flow by intercepting 401 responses and retrying with proper - credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`2213`, :issue:`10725`. - - - -- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`. - - This change allows users to add middleware to the client session and requests, enabling features like - authentication, logging, and request/response modification without modifying the core - request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``, - allowing middleware to access the session for making additional requests. - - - *Related issues and pull requests on GitHub:* - :issue:`9732`, :issue:`10902`, :issue:`10952`. - - - -- Allow user setting zlib compression backend -- by :user:`TimMenninger` - - This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the - zlib compression module of their choice. Default behavior continues to use - the builtin ``zlib`` library. - - - *Related issues and pull requests on GitHub:* - :issue:`9798`. - - - -- Added support for overriding the base URL with an absolute one in client sessions - -- by :user:`vivodi`. - - - *Related issues and pull requests on GitHub:* - :issue:`10074`. - - - -- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`. - - - *Related issues and pull requests on GitHub:* - :issue:`10120`. - - - -- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`. - - - *Related issues and pull requests on GitHub:* - :issue:`10433`. - - - -- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options - -- by :user:`TimMenninger`. 
- - - *Related issues and pull requests on GitHub:* - :issue:`10474`, :issue:`10520`. - - - -- Started building armv7l manylinux wheels -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10797`. - - - -- Implemented shared DNS resolver management to fix excessive resolver object creation - when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures - only one ``DNSResolver`` object is created for default configurations, significantly - reducing resource usage and improving performance for applications using multiple - client sessions simultaneously -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10847`, :issue:`10923`, :issue:`10946`. - - - - -Packaging updates and notes for downstreams -------------------------------------------- - -- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`. - - - *Related issues and pull requests on GitHub:* - :issue:`10662`. - - - -- Added support for building against system ``llhttp`` library -- by :user:`mgorny`. - - This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that - can be used to build aiohttp against the system install of the ``llhttp`` library rather - than the vendored one. - - - *Related issues and pull requests on GitHub:* - :issue:`10759`. - - - -- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`. - - As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later. - - - *Related issues and pull requests on GitHub:* - :issue:`10823`. - - - -- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco` - - - *Related issues and pull requests on GitHub:* - :issue:`10877`. 
- - - - -Contributor-facing changes --------------------------- - -- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. - - - *Related issues and pull requests on GitHub:* - :issue:`9705`, :issue:`10761`. - - - -- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and - make it compatible with Python 3.14. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. - - - -- Added Winloop to test suite to support in the future -- by :user:`Vizonex`. - - - *Related issues and pull requests on GitHub:* - :issue:`10922`. - - - - -Miscellaneous internal changes ------------------------------- - -- Added support for the ``partitioned`` attribute in the ``set_cookie`` method. - - - *Related issues and pull requests on GitHub:* - :issue:`9870`. - - - -- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10146`. - - - - ----- - - -3.12.0b2 (2025-05-22) -===================== - -Bug fixes ---------- - -- Response is now always True, instead of using MutableMapping behaviour (False when map is empty) - - - *Related issues and pull requests on GitHub:* - :issue:`10119`. - - - -- Fixed connection reuse for file-like data payloads by ensuring buffer - truncation respects content-length boundaries and preventing premature - connection closure race -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10325`, :issue:`10915`, :issue:`10941`, :issue:`10943`. - - - -- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. - - - - -Features --------- - -- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware) - that implements RFC 7616. 
The middleware supports all standard hash algorithms - (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and - 'auth-int' quality of protection options, and automatically manages the - authentication flow by intercepting 401 responses and retrying with proper - credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`2213`, :issue:`10725`. - - - -- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`. - - This change allows users to add middleware to the client session and requests, enabling features like - authentication, logging, and request/response modification without modifying the core - request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``, - allowing middleware to access the session for making additional requests. - - - *Related issues and pull requests on GitHub:* - :issue:`9732`, :issue:`10902`. - - - -- Allow user setting zlib compression backend -- by :user:`TimMenninger` - - This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the - zlib compression module of their choice. Default behavior continues to use - the builtin ``zlib`` library. - - - *Related issues and pull requests on GitHub:* - :issue:`9798`. - - - -- Added support for overriding the base URL with an absolute one in client sessions - -- by :user:`vivodi`. - - - *Related issues and pull requests on GitHub:* - :issue:`10074`. - - - -- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`. - - - *Related issues and pull requests on GitHub:* - :issue:`10120`. - - - -- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`. - - - *Related issues and pull requests on GitHub:* - :issue:`10433`. - - - -- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options - -- by :user:`TimMenninger`. 
- - - *Related issues and pull requests on GitHub:* - :issue:`10474`, :issue:`10520`. - - - -- Started building armv7l manylinux wheels -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10797`. - - - -- Implemented shared DNS resolver management to fix excessive resolver object creation - when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures - only one ``DNSResolver`` object is created for default configurations, significantly - reducing resource usage and improving performance for applications using multiple - client sessions simultaneously -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10847`, :issue:`10923`, :issue:`10946`. - - - - -Packaging updates and notes for downstreams -------------------------------------------- - -- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`. - - - *Related issues and pull requests on GitHub:* - :issue:`10662`. - - - -- Added support for building against system ``llhttp`` library -- by :user:`mgorny`. - - This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that - can be used to build aiohttp against the system install of the ``llhttp`` library rather - than the vendored one. - - - *Related issues and pull requests on GitHub:* - :issue:`10759`. - - - -- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`. - - As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later. - - - *Related issues and pull requests on GitHub:* - :issue:`10823`. - - - -- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco` - - - *Related issues and pull requests on GitHub:* - :issue:`10877`. 
- - - - -Contributor-facing changes --------------------------- - -- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. - - - *Related issues and pull requests on GitHub:* - :issue:`9705`, :issue:`10761`. - - - -- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and - make it compatible with Python 3.14. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. - - - -- Added Winloop to test suite to support in the future -- by :user:`Vizonex`. - - - *Related issues and pull requests on GitHub:* - :issue:`10922`. - - - - -Miscellaneous internal changes ------------------------------- - -- Added support for the ``partitioned`` attribute in the ``set_cookie`` method. - - - *Related issues and pull requests on GitHub:* - :issue:`9870`. - - - -- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10146`. - - - - ----- - - -3.12.0b1 (2025-05-22) -===================== - -Bug fixes ---------- - -- Response is now always True, instead of using MutableMapping behaviour (False when map is empty) - - - *Related issues and pull requests on GitHub:* - :issue:`10119`. - - - -- Fixed connection reuse for file-like data payloads by ensuring buffer - truncation respects content-length boundaries and preventing premature - connection closure race -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10325`, :issue:`10915`. - - - -- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. - - - - -Features --------- - -- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware) - that implements RFC 7616. 
The middleware supports all standard hash algorithms - (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and - 'auth-int' quality of protection options, and automatically manages the - authentication flow by intercepting 401 responses and retrying with proper - credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`2213`, :issue:`10725`. - - - -- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`. - - This change allows users to add middleware to the client session and requests, enabling features like - authentication, logging, and request/response modification without modifying the core - request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``, - allowing middleware to access the session for making additional requests. - - - *Related issues and pull requests on GitHub:* - :issue:`9732`, :issue:`10902`. - - - -- Allow user setting zlib compression backend -- by :user:`TimMenninger` - - This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the - zlib compression module of their choice. Default behavior continues to use - the builtin ``zlib`` library. - - - *Related issues and pull requests on GitHub:* - :issue:`9798`. - - - -- Added support for overriding the base URL with an absolute one in client sessions - -- by :user:`vivodi`. - - - *Related issues and pull requests on GitHub:* - :issue:`10074`. - - - -- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`. - - - *Related issues and pull requests on GitHub:* - :issue:`10120`. - - - -- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`. - - - *Related issues and pull requests on GitHub:* - :issue:`10433`. - - - -- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options - -- by :user:`TimMenninger`. 
- - - *Related issues and pull requests on GitHub:* - :issue:`10474`, :issue:`10520`. - - - -- Started building armv7l manylinux wheels -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10797`. - - - -- Implemented shared DNS resolver management to fix excessive resolver object creation - when using multiple client sessions. The new ``_DNSResolverManager`` singleton ensures - only one ``DNSResolver`` object is created for default configurations, significantly - reducing resource usage and improving performance for applications using multiple - client sessions simultaneously -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10847`, :issue:`10923`. - - - - -Packaging updates and notes for downstreams -------------------------------------------- - -- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`. - - - *Related issues and pull requests on GitHub:* - :issue:`10662`. - - - -- Added support for building against system ``llhttp`` library -- by :user:`mgorny`. - - This change adds support for :envvar:`AIOHTTP_USE_SYSTEM_DEPS` environment variable that - can be used to build aiohttp against the system install of the ``llhttp`` library rather - than the vendored one. - - - *Related issues and pull requests on GitHub:* - :issue:`10759`. - - - -- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`. - - As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later. - - - *Related issues and pull requests on GitHub:* - :issue:`10823`. - - - -- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco` - - - *Related issues and pull requests on GitHub:* - :issue:`10877`. - - - - -Contributor-facing changes --------------------------- - -- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. 
- - - *Related issues and pull requests on GitHub:* - :issue:`9705`, :issue:`10761`. - - - -- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and - make it compatible with Python 3.14. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. - - - -- Added Winloop to test suite to support in the future -- by :user:`Vizonex`. - - - *Related issues and pull requests on GitHub:* - :issue:`10922`. - - - - -Miscellaneous internal changes ------------------------------- - -- Added support for the ``partitioned`` attribute in the ``set_cookie`` method. - - - *Related issues and pull requests on GitHub:* - :issue:`9870`. - - - -- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10146`. - - - - ----- - - -3.12.0b0 (2025-05-20) -===================== - -Bug fixes ---------- - -- Response is now always True, instead of using MutableMapping behaviour (False when map is empty) - - - *Related issues and pull requests on GitHub:* - :issue:`10119`. - - - -- Fixed pytest plugin to not use deprecated :py:mod:`asyncio` policy APIs. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. - - - - -Features --------- - -- Added a comprehensive HTTP Digest Authentication client middleware (DigestAuthMiddleware) - that implements RFC 7616. The middleware supports all standard hash algorithms - (MD5, SHA, SHA-256, SHA-512) with session variants, handles both 'auth' and - 'auth-int' quality of protection options, and automatically manages the - authentication flow by intercepting 401 responses and retrying with proper - credentials -- by :user:`feus4177`, :user:`TimMenninger`, and :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`2213`, :issue:`10725`. - - - -- Added client middleware support -- by :user:`bdraco` and :user:`Dreamsorcerer`. 
- - This change allows users to add middleware to the client session and requests, enabling features like - authentication, logging, and request/response modification without modifying the core - request logic. Additionally, the ``session`` attribute was added to ``ClientRequest``, - allowing middleware to access the session for making additional requests. - - - *Related issues and pull requests on GitHub:* - :issue:`9732`, :issue:`10902`. - - - -- Allow user setting zlib compression backend -- by :user:`TimMenninger` - - This change allows the user to call :func:`aiohttp.set_zlib_backend()` with the - zlib compression module of their choice. Default behavior continues to use - the builtin ``zlib`` library. - - - *Related issues and pull requests on GitHub:* - :issue:`9798`. - - - -- Added support for overriding the base URL with an absolute one in client sessions - -- by :user:`vivodi`. - - - *Related issues and pull requests on GitHub:* - :issue:`10074`. - - - -- Added ``host`` parameter to ``aiohttp_server`` fixture -- by :user:`christianwbrock`. - - - *Related issues and pull requests on GitHub:* - :issue:`10120`. - - - -- Detect blocking calls in coroutines using BlockBuster -- by :user:`cbornet`. - - - *Related issues and pull requests on GitHub:* - :issue:`10433`. - - - -- Added ``socket_factory`` to :py:class:`aiohttp.TCPConnector` to allow specifying custom socket options - -- by :user:`TimMenninger`. - - - *Related issues and pull requests on GitHub:* - :issue:`10474`, :issue:`10520`. - - - -- Started building armv7l manylinux wheels -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10797`. - - - -- Implemented shared DNS resolver management to fix excessive resolver object creation - when using multiple client sessions. 
The new ``_DNSResolverManager`` singleton ensures - only one ``DNSResolver`` object is created for default configurations, significantly - reducing resource usage and improving performance for applications using multiple - client sessions simultaneously -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10847`. - - - - -Packaging updates and notes for downstreams -------------------------------------------- - -- Removed non SPDX-license description from ``setup.cfg`` -- by :user:`devanshu-ziphq`. - - - *Related issues and pull requests on GitHub:* - :issue:`10662`. - - - -- ``aiodns`` is now installed on Windows with speedups extra -- by :user:`bdraco`. - - As of ``aiodns`` 3.3.0, ``SelectorEventLoop`` is no longer required when using ``pycares`` 4.7.0 or later. - - - *Related issues and pull requests on GitHub:* - :issue:`10823`. - - - -- Fixed compatibility issue with Cython 3.1.1 -- by :user:`bdraco` - - - *Related issues and pull requests on GitHub:* - :issue:`10877`. - - - - -Contributor-facing changes --------------------------- - -- Sped up tests by disabling ``blockbuster`` fixture for ``test_static_file_huge`` and ``test_static_file_huge_cancel`` tests -- by :user:`dikos1337`. - - - *Related issues and pull requests on GitHub:* - :issue:`9705`, :issue:`10761`. - - - -- Updated tests to avoid using deprecated :py:mod:`asyncio` policy APIs and - make it compatible with Python 3.14. - - - *Related issues and pull requests on GitHub:* - :issue:`10851`. - - - - -Miscellaneous internal changes ------------------------------- - -- Added support for the ``partitioned`` attribute in the ``set_cookie`` method. - - - *Related issues and pull requests on GitHub:* - :issue:`9870`. - - - -- Setting :attr:`aiohttp.web.StreamResponse.last_modified` to an unsupported type will now raise :exc:`TypeError` instead of silently failing -- by :user:`bdraco`. - - - *Related issues and pull requests on GitHub:* - :issue:`10146`. 
- - - - ---- diff --git a/CHANGES/11017.feature.rst b/CHANGES/11017.feature.rst deleted file mode 100644 index 361c56e3fe8..00000000000 --- a/CHANGES/11017.feature.rst +++ /dev/null @@ -1,3 +0,0 @@ -Added support for reusable request bodies to enable retries, redirects, and digest authentication -- by :user:`bdraco` and :user:`GLGDLY`. - -Most payloads can now be safely reused multiple times, fixing long-standing issues where POST requests with form data or file uploads would fail on redirects with errors like "Form data has been processed already" or "I/O operation on closed file". This also enables digest authentication to work with request bodies and allows retry mechanisms to resend requests without consuming the payload. Note that payloads derived from async iterables may still not be reusable in some cases. diff --git a/CHANGES/5530.feature.rst b/CHANGES/5530.feature.rst deleted file mode 120000 index 63bf4429e55..00000000000 --- a/CHANGES/5530.feature.rst +++ /dev/null @@ -1 +0,0 @@ -11017.feature.rst \ No newline at end of file diff --git a/CHANGES/5577.feature.rst b/CHANGES/5577.feature.rst deleted file mode 120000 index 63bf4429e55..00000000000 --- a/CHANGES/5577.feature.rst +++ /dev/null @@ -1 +0,0 @@ -11017.feature.rst \ No newline at end of file diff --git a/CHANGES/9201.feature.rst b/CHANGES/9201.feature.rst deleted file mode 120000 index 63bf4429e55..00000000000 --- a/CHANGES/9201.feature.rst +++ /dev/null @@ -1 +0,0 @@ -11017.feature.rst \ No newline at end of file diff --git a/aiohttp/client_middleware_digest_auth.py b/aiohttp/client_middleware_digest_auth.py index 9a8ffc18313..b2daf76e6bb 100644 --- a/aiohttp/client_middleware_digest_auth.py +++ b/aiohttp/client_middleware_digest_auth.py @@ -193,7 +193,9 @@ def __init__( self._nonce_count = 0 self._challenge: DigestAuthChallenge = {} - async def _encode(self, method: str, url: URL, body: Union[bytes, Payload]) -> str: + async def _encode( + self, method: str, url: URL, body: Union[Payload, 
Literal[b""]] + ) -> str: """ Build digest authorization header for the current challenge. @@ -274,10 +276,10 @@ def KD(s: bytes, d: bytes) -> bytes: A1 = b":".join((self._login_bytes, realm_bytes, self._password_bytes)) A2 = f"{method.upper()}:{path}".encode() if qop == "auth-int": - if isinstance(body, bytes): # will always be empty bytes unless Payload - entity_bytes = body - else: + if isinstance(body, Payload): # will always be empty bytes unless Payload entity_bytes = await body.as_bytes() # Get bytes from Payload + else: + entity_bytes = body entity_hash = H(entity_bytes) A2 = b":".join((A2, entity_hash)) diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 59a11be3764..6823da38346 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -17,6 +17,7 @@ Dict, Iterable, List, + Literal, Mapping, NamedTuple, Optional, @@ -95,6 +96,7 @@ from .tracing import Trace +_CONNECTION_CLOSED_EXCEPTION = ClientConnectionError("Connection closed") _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") @@ -209,1105 +211,1208 @@ def _warn_if_unclosed_payload(payload: payload.Payload, stacklevel: int = 2) -> ) -class ClientRequest: - GET_METHODS = { - hdrs.METH_GET, - hdrs.METH_HEAD, - hdrs.METH_OPTIONS, - hdrs.METH_TRACE, - } - POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT} - ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE}) - - DEFAULT_HEADERS = { - hdrs.ACCEPT: "*/*", - hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(), - } - - # Type of body depends on PAYLOAD_REGISTRY, which is dynamic. - _body: Union[None, payload.Payload] = None - auth = None - response = None +class ClientResponse(HeadersMixin): + # Some of these attributes are None when created, + # but will be set by the start() method. + # As the end user will likely never see the None values, we cheat the types below. 
+ # from the Status-Line of the response + version: Optional[HttpVersion] = None # HTTP-Version + status: int = None # type: ignore[assignment] # Status-Code + reason: Optional[str] = None # Reason-Phrase - # These class defaults help create_autospec() work correctly. - # If autospec is improved in future, maybe these can be removed. - url = URL() - method = "GET" + content: StreamReader = None # type: ignore[assignment] # Payload stream + _body: Optional[bytes] = None + _headers: CIMultiDictProxy[str] = None # type: ignore[assignment] + _history: Tuple["ClientResponse", ...] = () + _raw_headers: RawHeaders = None # type: ignore[assignment] - __writer: Optional["asyncio.Task[None]"] = None # async task for streaming data - _continue = None # waiter future for '100 Continue' response + _connection: Optional["Connection"] = None # current connection + _cookies: Optional[SimpleCookie] = None + _continue: Optional["asyncio.Future[bool]"] = None + _source_traceback: Optional[traceback.StackSummary] = None + _session: Optional["ClientSession"] = None + # set up by ClientRequest after ClientResponse object creation + # post-init stage allows to not change ctor signature + _closed = True # to allow __del__ for non-initialized properly response + _released = False + _in_context = False - _skip_auto_headers: Optional["CIMultiDict[None]"] = None + _resolve_charset: Callable[["ClientResponse", bytes], str] = lambda *_: "utf-8" - # N.B. - # Adding __del__ method with self._writer closing doesn't make sense - # because _writer is instance method, thus it keeps a reference to self. - # Until writer has finished finalizer will not be called. 
+ __writer: Optional["asyncio.Task[None]"] = None def __init__( self, method: str, url: URL, *, - params: Query = None, - headers: Optional[LooseHeaders] = None, - skip_auto_headers: Optional[Iterable[str]] = None, - data: Any = None, - cookies: Optional[LooseCookies] = None, - auth: Optional[BasicAuth] = None, - version: http.HttpVersion = http.HttpVersion11, - compress: Union[str, bool] = False, - chunked: Optional[bool] = None, - expect100: bool = False, + writer: "Optional[asyncio.Task[None]]", + continue100: Optional["asyncio.Future[bool]"], + timer: Optional[BaseTimerContext], + request_info: RequestInfo, + traces: List["Trace"], loop: asyncio.AbstractEventLoop, - response_class: Optional[Type["ClientResponse"]] = None, - proxy: Optional[URL] = None, - proxy_auth: Optional[BasicAuth] = None, - timer: Optional[BaseTimerContext] = None, - session: Optional["ClientSession"] = None, - ssl: Union[SSLContext, bool, Fingerprint] = True, - proxy_headers: Optional[LooseHeaders] = None, - traces: Optional[List["Trace"]] = None, - trust_env: bool = False, - server_hostname: Optional[str] = None, - ): - if match := _CONTAINS_CONTROL_CHAR_RE.search(method): - raise ValueError( - f"Method cannot contain non-token characters {method!r} " - f"(found at least {match.group()!r})" - ) + session: "ClientSession", + ) -> None: # URL forbids subclasses, so a simple type check is enough. 
- assert type(url) is URL, url - if proxy is not None: - assert type(proxy) is URL, proxy - # FIXME: session is None in tests only, need to fix tests - # assert session is not None - if TYPE_CHECKING: - assert session is not None - self._session = session - if params: - url = url.extend_query(params) - self.original_url = url - self.url = url.with_fragment(None) if url.raw_fragment else url - self.method = method.upper() - self.chunked = chunked - self.loop = loop - self.length = None - if response_class is None: - real_response_class = ClientResponse - else: - real_response_class = response_class - self.response_class: Type[ClientResponse] = real_response_class - self._timer = timer if timer is not None else TimerNoop() - self._ssl = ssl - self.server_hostname = server_hostname + assert type(url) is URL + + self.method = method + self._real_url = url + self._url = url.with_fragment(None) if url.raw_fragment else url + if writer is not None: + self._writer = writer + if continue100 is not None: + self._continue = continue100 + self._request_info = request_info + self._timer = timer if timer is not None else TimerNoop() + self._cache: Dict[str, Any] = {} + self._traces = traces + self._loop = loop + # Save reference to _resolve_charset, so that get_encoding() will still + # work after the response has finished reading the body. + # TODO: Fix session=None in tests (see ClientRequest.__init__). 
+ if session is not None: + # store a reference to session #1985 + self._session = session + self._resolve_charset = session._resolve_charset if loop.get_debug(): self._source_traceback = traceback.extract_stack(sys._getframe(1)) - self.update_version(version) - self.update_host(url) - self.update_headers(headers) - self.update_auto_headers(skip_auto_headers) - self.update_cookies(cookies) - self.update_content_encoding(data, compress) - self.update_auth(auth, trust_env) - self.update_proxy(proxy, proxy_auth, proxy_headers) - - self.update_body_from_data(data) - if data is not None or self.method not in self.GET_METHODS: - self.update_transfer_encoding() - self.update_expect_continue(expect100) - self._traces = [] if traces is None else traces - def __reset_writer(self, _: object = None) -> None: self.__writer = None - def _get_content_length(self) -> Optional[int]: - """Extract and validate Content-Length header value. - - Returns parsed Content-Length value or None if not set. - Raises ValueError if header exists but cannot be parsed as an integer. - """ - if hdrs.CONTENT_LENGTH not in self.headers: - return None - - content_length_hdr = self.headers[hdrs.CONTENT_LENGTH] - try: - return int(content_length_hdr) - except ValueError: - raise ValueError( - f"Invalid Content-Length header: {content_length_hdr}" - ) from None - - @property - def skip_auto_headers(self) -> CIMultiDict[None]: - return self._skip_auto_headers or CIMultiDict() - @property def _writer(self) -> Optional["asyncio.Task[None]"]: + """The writer task for streaming data. + + _writer is only provided for backwards compatibility + for subclasses that may need to access it. 
+ """ return self.__writer @_writer.setter - def _writer(self, writer: "asyncio.Task[None]") -> None: + def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: + """Set the writer task for streaming data.""" if self.__writer is not None: self.__writer.remove_done_callback(self.__reset_writer) self.__writer = writer - writer.add_done_callback(self.__reset_writer) - - def is_ssl(self) -> bool: - return self.url.scheme in _SSL_SCHEMES + if writer is None: + return + if writer.done(): + # The writer is already done, so we can clear it immediately. + self.__writer = None + else: + writer.add_done_callback(self.__reset_writer) @property - def ssl(self) -> Union["SSLContext", bool, Fingerprint]: - return self._ssl + def cookies(self) -> SimpleCookie: + if self._cookies is None: + self._cookies = SimpleCookie() + return self._cookies - @property - def connection_key(self) -> ConnectionKey: # type: ignore[misc] - if proxy_headers := self.proxy_headers: - h: Optional[int] = hash(tuple(proxy_headers.items())) - else: - h = None - url = self.url - return tuple.__new__( - ConnectionKey, - ( - url.raw_host or "", - url.port, - url.scheme in _SSL_SCHEMES, - self._ssl, - self.proxy, - self.proxy_auth, - h, - ), - ) + @cookies.setter + def cookies(self, cookies: SimpleCookie) -> None: + self._cookies = cookies - @property - def host(self) -> str: - ret = self.url.raw_host - assert ret is not None - return ret + @reify + def url(self) -> URL: + return self._url - @property - def port(self) -> Optional[int]: - return self.url.port + @reify + def real_url(self) -> URL: + return self._real_url - @property - def body(self) -> Union[bytes, payload.Payload]: - """Request body.""" - # empty body is represented as bytes for backwards compatibility - return self._body or b"" + @reify + def host(self) -> str: + assert self._url.host is not None + return self._url.host - @body.setter - def body(self, value: Any) -> None: - """Set request body with warning for non-autoclose payloads. 
+ @reify + def headers(self) -> "CIMultiDictProxy[str]": + return self._headers - WARNING: This setter must be called from within an event loop and is not - thread-safe. Setting body outside of an event loop may raise RuntimeError - when closing file-based payloads. + @reify + def raw_headers(self) -> RawHeaders: + return self._raw_headers - DEPRECATED: Direct assignment to body is deprecated and will be removed - in a future version. Use await update_body() instead for proper resource - management. - """ - # Close existing payload if present - if self._body is not None: - # Warn if the payload needs manual closing - # stacklevel=3: user code -> body setter -> _warn_if_unclosed_payload - _warn_if_unclosed_payload(self._body, stacklevel=3) - # NOTE: In the future, when we remove sync close support, - # this setter will need to be removed and only the async - # update_body() method will be available. For now, we call - # _close() for backwards compatibility. - self._body._close() - self._update_body(value) - - @property + @reify def request_info(self) -> RequestInfo: - headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers) - # These are created on every request, so we use a NamedTuple - # for performance reasons. We don't use the RequestInfo.__new__ - # method because it has a different signature which is provided - # for backwards compatibility only. - return tuple.__new__( - RequestInfo, (self.url, self.method, headers, self.original_url) - ) - - @property - def session(self) -> "ClientSession": - """Return the ClientSession instance. + return self._request_info - This property provides access to the ClientSession that initiated - this request, allowing middleware to make additional requests - using the same session. 
- """ - return self._session + @reify + def content_disposition(self) -> Optional[ContentDisposition]: + raw = self._headers.get(hdrs.CONTENT_DISPOSITION) + if raw is None: + return None + disposition_type, params_dct = multipart.parse_content_disposition(raw) + params = MappingProxyType(params_dct) + filename = multipart.content_disposition_filename(params) + return ContentDisposition(disposition_type, params, filename) - def update_host(self, url: URL) -> None: - """Update destination host, port and connection type (ssl).""" - # get host/port - if not url.raw_host: - raise InvalidURL(url) + def __del__(self, _warnings: Any = warnings) -> None: + if self._closed: + return - # basic auth info - if url.raw_user or url.raw_password: - self.auth = helpers.BasicAuth(url.user or "", url.password or "") + if self._connection is not None: + self._connection.release() + self._cleanup_writer() - def update_version(self, version: Union[http.HttpVersion, str]) -> None: - """Convert request version to two elements tuple. 
+            if self._loop.get_debug():
+                _warnings.warn(
+                    f"Unclosed response {self!r}", ResourceWarning, source=self
+                )
+                context = {"client_response": self, "message": "Unclosed response"}
+                if self._source_traceback:
+                    context["source_traceback"] = self._source_traceback
+                self._loop.call_exception_handler(context)
-        parser HTTP version '1.1' => (1, 1)
-        """
-        if isinstance(version, str):
-            v = [part.strip() for part in version.split(".", 1)]
-            try:
-                version = http.HttpVersion(int(v[0]), int(v[1]))
-            except ValueError:
-                raise ValueError(
-                    f"Can not parse http version number: {version}"
-                ) from None
-        self.version = version
+    def __repr__(self) -> str:
+        out = io.StringIO()
+        ascii_encodable_url = str(self.url)
+        if self.reason:
+            ascii_encodable_reason = self.reason.encode(
+                "ascii", "backslashreplace"
+            ).decode("ascii")
+        else:
+            ascii_encodable_reason = "None"
+        print(
+            "<ClientResponse({}) [{} {}]>".format(
+                ascii_encodable_url, self.status, ascii_encodable_reason
+            ),
+            file=out,
+        )
+        print(self.headers, file=out)
+        return out.getvalue()
-    def update_headers(self, headers: Optional[LooseHeaders]) -> None:
-        """Update request headers."""
-        self.headers: CIMultiDict[str] = CIMultiDict()
+    @property
+    def connection(self) -> Optional["Connection"]:
+        return self._connection
-        # Build the host header
-        host = self.url.host_port_subcomponent
+    @reify
+    def history(self) -> Tuple["ClientResponse", ...]:
+        """A sequence of responses, if redirects occurred."""
+        return self._history
-        # host_port_subcomponent is None when the URL is a relative URL.
-        # but we know we do not have a relative URL here.
- assert host is not None - self.headers[hdrs.HOST] = host + @reify + def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]": + links_str = ", ".join(self.headers.getall("link", [])) - if not headers: - return + if not links_str: + return MultiDictProxy(MultiDict()) - if isinstance(headers, (dict, MultiDictProxy, MultiDict)): - headers = headers.items() + links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict() - for key, value in headers: # type: ignore[misc] - # A special case for Host header - if key in hdrs.HOST_ALL: - self.headers[key] = value - else: - self.headers.add(key, value) + for val in re.split(r",(?=\s*<)", links_str): + match = re.match(r"\s*<(.*)>(.*)", val) + if match is None: # Malformed link + continue + url, params_str = match.groups() + params = params_str.split(";")[1:] - def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None: - if skip_auto_headers is not None: - self._skip_auto_headers = CIMultiDict( - (hdr, None) for hdr in sorted(skip_auto_headers) - ) - used_headers = self.headers.copy() - used_headers.extend(self._skip_auto_headers) # type: ignore[arg-type] - else: - # Fast path when there are no headers to skip - # which is the most common case. 
- used_headers = self.headers + link: MultiDict[Union[str, URL]] = MultiDict() - for hdr, val in self.DEFAULT_HEADERS.items(): - if hdr not in used_headers: - self.headers[hdr] = val + for param in params: + match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M) + if match is None: # Malformed param + continue + key, _, value, _ = match.groups() - if hdrs.USER_AGENT not in used_headers: - self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE + link.add(key, value) - def update_cookies(self, cookies: Optional[LooseCookies]) -> None: - """Update request cookies header.""" - if not cookies: - return + key = link.get("rel", url) - c = SimpleCookie() - if hdrs.COOKIE in self.headers: - c.load(self.headers.get(hdrs.COOKIE, "")) - del self.headers[hdrs.COOKIE] + link.add("url", self.url.join(URL(url))) - if isinstance(cookies, Mapping): - iter_cookies = cookies.items() - else: - iter_cookies = cookies # type: ignore[assignment] - for name, value in iter_cookies: - if isinstance(value, Morsel): - # Preserve coded_value - mrsl_val = value.get(value.key, Morsel()) - mrsl_val.set(value.key, value.value, value.coded_value) - c[name] = mrsl_val - else: - c[name] = value # type: ignore[assignment] + links.add(str(key), MultiDictProxy(link)) - self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip() + return MultiDictProxy(links) - def update_content_encoding(self, data: Any, compress: Union[bool, str]) -> None: - """Set request content encoding.""" - self.compress = None - if not data: - return + async def start(self, connection: "Connection") -> "ClientResponse": + """Start response processing.""" + self._closed = False + self._protocol = connection.protocol + self._connection = connection - if self.headers.get(hdrs.CONTENT_ENCODING): - if compress: - raise ValueError( - "compress can not be set if Content-Encoding header is set" - ) - elif compress: - self.compress = compress if isinstance(compress, str) else "deflate" - self.headers[hdrs.CONTENT_ENCODING] = 
self.compress - self.chunked = True # enable chunked, no need to deal with length + with self._timer: + while True: + # read response + try: + protocol = self._protocol + message, payload = await protocol.read() # type: ignore[union-attr] + except http.HttpProcessingError as exc: + raise ClientResponseError( + self.request_info, + self.history, + status=exc.code, + message=exc.message, + headers=exc.headers, + ) from exc - def update_transfer_encoding(self) -> None: - """Analyze transfer-encoding header.""" - te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower() + if message.code < 100 or message.code > 199 or message.code == 101: + break - if "chunked" in te: - if self.chunked: - raise ValueError( - "chunked can not be set " - 'if "Transfer-Encoding: chunked" header is set' - ) + if self._continue is not None: + set_result(self._continue, True) + self._continue = None - elif self.chunked: - if hdrs.CONTENT_LENGTH in self.headers: - raise ValueError( - "chunked can not be set if Content-Length header is set" - ) + # payload eof handler + payload.on_eof(self._response_eof) - self.headers[hdrs.TRANSFER_ENCODING] = "chunked" - elif ( - self._body is not None - and hdrs.CONTENT_LENGTH not in self.headers - and (size := self._body.size) is not None - ): - self.headers[hdrs.CONTENT_LENGTH] = str(size) + # response status + self.version = message.version + self.status = message.code + self.reason = message.reason - def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None: - """Set basic auth.""" - if auth is None: - auth = self.auth - if auth is None and trust_env and self.url.host is not None: - netrc_obj = netrc_from_env() - with contextlib.suppress(LookupError): - auth = basicauth_from_netrc(netrc_obj, self.url.host) - if auth is None: - return + # headers + self._headers = message.headers # type is CIMultiDictProxy + self._raw_headers = message.raw_headers # type is Tuple[bytes, bytes] - if not isinstance(auth, helpers.BasicAuth): - raise 
TypeError("BasicAuth() tuple is required instead") + # payload + self.content = payload - self.headers[hdrs.AUTHORIZATION] = auth.encode() + # cookies + if cookie_hdrs := self.headers.getall(hdrs.SET_COOKIE, ()): + cookies = SimpleCookie() + for hdr in cookie_hdrs: + try: + cookies.load(hdr) + except CookieError as exc: + client_logger.warning("Can not load response cookies: %s", exc) + self._cookies = cookies + return self - def update_body_from_data(self, body: Any, _stacklevel: int = 3) -> None: - """Update request body from data.""" - if self._body is not None: - _warn_if_unclosed_payload(self._body, stacklevel=_stacklevel) + def _response_eof(self) -> None: + if self._closed: + return - if body is None: - self._body = None + # protocol could be None because connection could be detached + protocol = self._connection and self._connection.protocol + if protocol is not None and protocol.upgraded: return - # FormData - maybe_payload = body() if isinstance(body, FormData) else body + self._closed = True + self._cleanup_writer() + self._release_connection() - try: - body_payload = payload.PAYLOAD_REGISTRY.get(maybe_payload, disposition=None) - except payload.LookupError: - boundary: Optional[str] = None - if CONTENT_TYPE in self.headers: - boundary = parse_mimetype(self.headers[CONTENT_TYPE]).parameters.get( - "boundary" - ) - body_payload = FormData(maybe_payload, boundary=boundary)() # type: ignore[arg-type] + @property + def closed(self) -> bool: + return self._closed - self._body = body_payload - # enable chunked encoding if needed - if not self.chunked and hdrs.CONTENT_LENGTH not in self.headers: - if (size := body_payload.size) is not None: - self.headers[hdrs.CONTENT_LENGTH] = str(size) - else: - self.chunked = True + def close(self) -> None: + if not self._released: + self._notify_content() - # copy payload headers - assert body_payload.headers - headers = self.headers - skip_headers = self._skip_auto_headers - for key, value in body_payload.headers.items(): 
- if key in headers or (skip_headers is not None and key in skip_headers): - continue - headers[key] = value + self._closed = True + if self._loop.is_closed(): + return - def _update_body(self, body: Any) -> None: - """Update request body after its already been set.""" - # Remove existing Content-Length header since body is changing - if hdrs.CONTENT_LENGTH in self.headers: - del self.headers[hdrs.CONTENT_LENGTH] + self._cleanup_writer() + if self._connection is not None: + self._connection.close() + self._connection = None - # Remove existing Transfer-Encoding header to avoid conflicts - if self.chunked and hdrs.TRANSFER_ENCODING in self.headers: - del self.headers[hdrs.TRANSFER_ENCODING] + def release(self) -> None: + if not self._released: + self._notify_content() - # Now update the body using the existing method - # Called from _update_body, add 1 to stacklevel from caller - self.update_body_from_data(body, _stacklevel=4) + self._closed = True - # Update transfer encoding headers if needed (same logic as __init__) - if body is not None or self.method not in self.GET_METHODS: - self.update_transfer_encoding() + self._cleanup_writer() + self._release_connection() - async def update_body(self, body: Any) -> None: + @property + def ok(self) -> bool: + """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not. + + This is **not** a check for ``200 OK`` but a check that the response + status is under 400. """ - Update request body and close previous payload if needed. + return 400 > self.status - This method safely updates the request body by first closing any existing - payload to prevent resource leaks, then setting the new body. + def raise_for_status(self) -> None: + if not self.ok: + # reason should always be not None for a started response + assert self.reason is not None - IMPORTANT: Always use this method instead of setting request.body directly. 
- Direct assignment to request.body will leak resources if the previous body - contains file handles, streams, or other resources that need cleanup. + # If we're in a context we can rely on __aexit__() to release as the + # exception propagates. + if not self._in_context: + self.release() - Args: - body: The new body content. Can be: - - bytes/bytearray: Raw binary data - - str: Text data (will be encoded using charset from Content-Type) - - FormData: Form data that will be encoded as multipart/form-data - - Payload: A pre-configured payload object - - AsyncIterable: An async iterable of bytes chunks - - File-like object: Will be read and sent as binary data - - None: Clears the body + raise ClientResponseError( + self.request_info, + self.history, + status=self.status, + message=self.reason, + headers=self.headers, + ) - Usage: - # CORRECT: Use update_body - await request.update_body(b"new request data") + def _release_connection(self) -> None: + if self._connection is not None: + if self.__writer is None: + self._connection.release() + self._connection = None + else: + self.__writer.add_done_callback(lambda f: self._release_connection()) - # WRONG: Don't set body directly - # request.body = b"new request data" # This will leak resources! + async def _wait_released(self) -> None: + if self.__writer is not None: + try: + await self.__writer + except asyncio.CancelledError: + if ( + sys.version_info >= (3, 11) + and (task := asyncio.current_task()) + and task.cancelling() + ): + raise + self._release_connection() - # Update with form data - form_data = FormData() - form_data.add_field('field', 'value') - await request.update_body(form_data) + def _cleanup_writer(self) -> None: + if self.__writer is not None: + self.__writer.cancel() + self._session = None - # Clear body - await request.update_body(None) + def _notify_content(self) -> None: + content = self.content + # content can be None here, but the types are cheated elsewhere. 
+ if content and content.exception() is None: # type: ignore[truthy-bool] + set_exception(content, _CONNECTION_CLOSED_EXCEPTION) + self._released = True - Note: - This method is async because it may need to close file handles or - other resources associated with the previous payload. Always await - this method to ensure proper cleanup. + async def wait_for_close(self) -> None: + if self.__writer is not None: + try: + await self.__writer + except asyncio.CancelledError: + if ( + sys.version_info >= (3, 11) + and (task := asyncio.current_task()) + and task.cancelling() + ): + raise + self.release() - Warning: - Setting request.body directly is highly discouraged and can lead to: - - Resource leaks (unclosed file handles, streams) - - Memory leaks (unreleased buffers) - - Unexpected behavior with streaming payloads + async def read(self) -> bytes: + """Read response payload.""" + if self._body is None: + try: + self._body = await self.content.read() + for trace in self._traces: + await trace.send_response_chunk_received( + self.method, self.url, self._body + ) + except BaseException: + self.close() + raise + elif self._released: # Response explicitly released + raise ClientConnectionError("Connection closed") - It is not recommended to change the payload type in middleware. If the - body was already set (e.g., as bytes), it's best to keep the same type - rather than converting it (e.g., to str) as this may result in unexpected - behavior. 
+ protocol = self._connection and self._connection.protocol + if protocol is None or not protocol.upgraded: + await self._wait_released() # Underlying connection released + return self._body - See Also: - - update_body_from_data: Synchronous body update without cleanup - - body property: Direct body access (STRONGLY DISCOURAGED) + def get_encoding(self) -> str: + ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower() + mimetype = helpers.parse_mimetype(ctype) - """ - # Close existing payload if it exists and needs closing - if self._body is not None: - await self._body.close() - self._update_body(body) + encoding = mimetype.parameters.get("charset") + if encoding: + with contextlib.suppress(LookupError, ValueError): + return codecs.lookup(encoding).name - def update_expect_continue(self, expect: bool = False) -> None: - if expect: - self.headers[hdrs.EXPECT] = "100-continue" - elif ( - hdrs.EXPECT in self.headers - and self.headers[hdrs.EXPECT].lower() == "100-continue" + if mimetype.type == "application" and ( + mimetype.subtype == "json" or mimetype.subtype == "rdap" ): - expect = True + # RFC 7159 states that the default encoding is UTF-8. 
+ # RFC 7483 defines application/rdap+json + return "utf-8" - if expect: - self._continue = self.loop.create_future() + if self._body is None: + raise RuntimeError( + "Cannot compute fallback encoding of a not yet read body" + ) - def update_proxy( - self, - proxy: Optional[URL], - proxy_auth: Optional[BasicAuth], - proxy_headers: Optional[LooseHeaders], - ) -> None: - self.proxy = proxy - if proxy is None: - self.proxy_auth = None - self.proxy_headers = None - return + return self._resolve_charset(self, self._body) - if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth): - raise ValueError("proxy_auth must be None or BasicAuth() tuple") - self.proxy_auth = proxy_auth + async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str: + """Read response payload and decode.""" + await self.read() - if proxy_headers is not None and not isinstance( - proxy_headers, (MultiDict, MultiDictProxy) - ): - proxy_headers = CIMultiDict(proxy_headers) - self.proxy_headers = proxy_headers + if encoding is None: + encoding = self.get_encoding() - async def write_bytes( + return self._body.decode(encoding, errors=errors) # type: ignore[union-attr] + + async def json( self, - writer: AbstractStreamWriter, - conn: "Connection", - content_length: Optional[int], + *, + encoding: Optional[str] = None, + loads: JSONDecoder = DEFAULT_JSON_DECODER, + content_type: Optional[str] = "application/json", + ) -> Any: + """Read and decodes JSON response.""" + await self.read() + + if content_type: + if not is_expected_content_type(self.content_type, content_type): + raise ContentTypeError( + self.request_info, + self.history, + status=self.status, + message=( + "Attempt to decode JSON with " + "unexpected mimetype: %s" % self.content_type + ), + headers=self.headers, + ) + + if encoding is None: + encoding = self.get_encoding() + + return loads(self._body.decode(encoding)) # type: ignore[union-attr] + + async def __aenter__(self) -> "ClientResponse": + 
self._in_context = True + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], ) -> None: - """ - Write the request body to the connection stream. + self._in_context = False + # similar to _RequestContextManager, we do not need to check + # for exceptions, response object can close connection + # if state is broken + self.release() + await self.wait_for_close() - This method handles writing different types of request bodies: - 1. Payload objects (using their specialized write_with_length method) - 2. Bytes/bytearray objects - 3. Iterable body content - Args: - writer: The stream writer to write the body to - conn: The connection being used for this request - content_length: Optional maximum number of bytes to write from the body - (None means write the entire body) +class ClientRequest: + GET_METHODS = { + hdrs.METH_GET, + hdrs.METH_HEAD, + hdrs.METH_OPTIONS, + hdrs.METH_TRACE, + } + POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT} + ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE}) - The method properly handles: - - Waiting for 100-Continue responses if required - - Content length constraints for chunked encoding - - Error handling for network issues, cancellation, and other exceptions - - Signaling EOF and timeout management + DEFAULT_HEADERS = { + hdrs.ACCEPT: "*/*", + hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(), + } - Raises: - ClientOSError: When there's an OS-level error writing the body - ClientConnectionError: When there's a general connection error - asyncio.CancelledError: When the operation is cancelled + # Type of body depends on PAYLOAD_REGISTRY, which is dynamic. 
+ _body: Union[None, payload.Payload] = None + auth = None + response = None - """ - # 100 response - if self._continue is not None: - # Force headers to be sent before waiting for 100-continue - writer.send_headers() - await writer.drain() - await self._continue + # These class defaults help create_autospec() work correctly. + # If autospec is improved in future, maybe these can be removed. + url = URL() + method = "GET" - protocol = conn.protocol - assert protocol is not None - try: - # This should be a rare case but the - # self._body can be set to None while - # the task is being started or we wait above - # for the 100-continue response. - # The more likely case is we have an empty - # payload, but 100-continue is still expected. - if self._body is not None: - await self._body.write_with_length(writer, content_length) - except OSError as underlying_exc: - reraised_exc = underlying_exc + __writer: Optional["asyncio.Task[None]"] = None # async task for streaming data + _continue = None # waiter future for '100 Continue' response - # Distinguish between timeout and other OS errors for better error reporting - exc_is_not_timeout = underlying_exc.errno is not None or not isinstance( - underlying_exc, asyncio.TimeoutError - ) - if exc_is_not_timeout: - reraised_exc = ClientOSError( - underlying_exc.errno, - f"Can not write request body for {self.url !s}", - ) + _skip_auto_headers: Optional["CIMultiDict[None]"] = None - set_exception(protocol, reraised_exc, underlying_exc) - except asyncio.CancelledError: - # Body hasn't been fully sent, so connection can't be reused - conn.close() - raise - except Exception as underlying_exc: - set_exception( - protocol, - ClientConnectionError( - "Failed to send bytes into the underlying connection " - f"{conn !s}: {underlying_exc!r}", - ), - underlying_exc, + # N.B. + # Adding __del__ method with self._writer closing doesn't make sense + # because _writer is instance method, thus it keeps a reference to self. 
+ # Until writer has finished finalizer will not be called. + + def __init__( + self, + method: str, + url: URL, + *, + params: Query = None, + headers: Optional[LooseHeaders] = None, + skip_auto_headers: Optional[Iterable[str]] = None, + data: Any = None, + cookies: Optional[LooseCookies] = None, + auth: Optional[BasicAuth] = None, + version: http.HttpVersion = http.HttpVersion11, + compress: Union[str, bool] = False, + chunked: Optional[bool] = None, + expect100: bool = False, + loop: asyncio.AbstractEventLoop, + response_class: Optional[Type["ClientResponse"]] = None, + proxy: Optional[URL] = None, + proxy_auth: Optional[BasicAuth] = None, + timer: Optional[BaseTimerContext] = None, + session: Optional["ClientSession"] = None, + ssl: Union[SSLContext, bool, Fingerprint] = True, + proxy_headers: Optional[LooseHeaders] = None, + traces: Optional[List["Trace"]] = None, + trust_env: bool = False, + server_hostname: Optional[str] = None, + ): + if match := _CONTAINS_CONTROL_CHAR_RE.search(method): + raise ValueError( + f"Method cannot contain non-token characters {method!r} " + f"(found at least {match.group()!r})" ) + # URL forbids subclasses, so a simple type check is enough. 
+ assert type(url) is URL, url + if proxy is not None: + assert type(proxy) is URL, proxy + # FIXME: session is None in tests only, need to fix tests + # assert session is not None + if TYPE_CHECKING: + assert session is not None + self._session = session + if params: + url = url.extend_query(params) + self.original_url = url + self.url = url.with_fragment(None) if url.raw_fragment else url + self.method = method.upper() + self.chunked = chunked + self.loop = loop + self.length = None + if response_class is None: + real_response_class = ClientResponse else: - # Successfully wrote the body, signal EOF and start response timeout - await writer.write_eof() - protocol.start_timeout() + real_response_class = response_class + self.response_class: Type[ClientResponse] = real_response_class + self._timer = timer if timer is not None else TimerNoop() + self._ssl = ssl + self.server_hostname = server_hostname - async def send(self, conn: "Connection") -> "ClientResponse": - # Specify request target: - # - CONNECT request must send authority form URI - # - not CONNECT proxy must send absolute form URI - # - most common is origin form URI - if self.method == hdrs.METH_CONNECT: - connect_host = self.url.host_subcomponent - assert connect_host is not None - path = f"{connect_host}:{self.url.port}" - elif self.proxy and not self.is_ssl(): - path = str(self.url) - else: - path = self.url.raw_path_qs + if loop.get_debug(): + self._source_traceback = traceback.extract_stack(sys._getframe(1)) - protocol = conn.protocol - assert protocol is not None - writer = StreamWriter( - protocol, - self.loop, - on_chunk_sent=( - functools.partial(self._on_chunk_request_sent, self.method, self.url) - if self._traces - else None - ), - on_headers_sent=( - functools.partial(self._on_headers_request_sent, self.method, self.url) - if self._traces - else None + self.update_version(version) + self.update_host(url) + self.update_headers(headers) + self.update_auto_headers(skip_auto_headers) + 
self.update_cookies(cookies) + self.update_content_encoding(data, compress) + self.update_auth(auth, trust_env) + self.update_proxy(proxy, proxy_auth, proxy_headers) + + self.update_body_from_data(data) + if data is not None or self.method not in self.GET_METHODS: + self.update_transfer_encoding() + self.update_expect_continue(expect100) + self._traces = [] if traces is None else traces + + def __reset_writer(self, _: object = None) -> None: + self.__writer = None + + def _get_content_length(self) -> Optional[int]: + """Extract and validate Content-Length header value. + + Returns parsed Content-Length value or None if not set. + Raises ValueError if header exists but cannot be parsed as an integer. + """ + if hdrs.CONTENT_LENGTH not in self.headers: + return None + + content_length_hdr = self.headers[hdrs.CONTENT_LENGTH] + try: + return int(content_length_hdr) + except ValueError: + raise ValueError( + f"Invalid Content-Length header: {content_length_hdr}" + ) from None + + @property + def skip_auto_headers(self) -> CIMultiDict[None]: + return self._skip_auto_headers or CIMultiDict() + + @property + def _writer(self) -> Optional["asyncio.Task[None]"]: + return self.__writer + + @_writer.setter + def _writer(self, writer: "asyncio.Task[None]") -> None: + if self.__writer is not None: + self.__writer.remove_done_callback(self.__reset_writer) + self.__writer = writer + writer.add_done_callback(self.__reset_writer) + + def is_ssl(self) -> bool: + return self.url.scheme in _SSL_SCHEMES + + @property + def ssl(self) -> Union["SSLContext", bool, Fingerprint]: + return self._ssl + + @property + def connection_key(self) -> ConnectionKey: # type: ignore[misc] + if proxy_headers := self.proxy_headers: + h: Optional[int] = hash(tuple(proxy_headers.items())) + else: + h = None + url = self.url + return tuple.__new__( + ConnectionKey, + ( + url.raw_host or "", + url.port, + url.scheme in _SSL_SCHEMES, + self._ssl, + self.proxy, + self.proxy_auth, + h, ), ) - if self.compress: - 
writer.enable_compression(self.compress) + @property + def host(self) -> str: + ret = self.url.raw_host + assert ret is not None + return ret - if self.chunked is not None: - writer.enable_chunking() + @property + def port(self) -> Optional[int]: + return self.url.port - # set default content-type - if ( - self.method in self.POST_METHODS - and ( - self._skip_auto_headers is None - or hdrs.CONTENT_TYPE not in self._skip_auto_headers - ) - and hdrs.CONTENT_TYPE not in self.headers - ): - self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream" + @property + def body(self) -> Union[payload.Payload, Literal[b""]]: + """Request body.""" + # empty body is represented as bytes for backwards compatibility + return self._body or b"" - v = self.version - if hdrs.CONNECTION not in self.headers: - if conn._connector.force_close: - if v == HttpVersion11: - self.headers[hdrs.CONNECTION] = "close" - elif v == HttpVersion10: - self.headers[hdrs.CONNECTION] = "keep-alive" + @body.setter + def body(self, value: Any) -> None: + """Set request body with warning for non-autoclose payloads. - # status + headers - status_line = f"{self.method} {path} HTTP/{v.major}.{v.minor}" + WARNING: This setter must be called from within an event loop and is not + thread-safe. Setting body outside of an event loop may raise RuntimeError + when closing file-based payloads. - # Buffer headers for potential coalescing with body - await writer.write_headers(status_line, self.headers) + DEPRECATED: Direct assignment to body is deprecated and will be removed + in a future version. Use await update_body() instead for proper resource + management. 
+ """ + # Close existing payload if present + if self._body is not None: + # Warn if the payload needs manual closing + # stacklevel=3: user code -> body setter -> _warn_if_unclosed_payload + _warn_if_unclosed_payload(self._body, stacklevel=3) + # NOTE: In the future, when we remove sync close support, + # this setter will need to be removed and only the async + # update_body() method will be available. For now, we call + # _close() for backwards compatibility. + self._body._close() + self._update_body(value) - task: Optional["asyncio.Task[None]"] - if self._body or self._continue is not None or protocol.writing_paused: - coro = self.write_bytes(writer, conn, self._get_content_length()) - if sys.version_info >= (3, 12): - # Optimization for Python 3.12, try to write - # bytes immediately to avoid having to schedule - # the task on the event loop. - task = asyncio.Task(coro, loop=self.loop, eager_start=True) - else: - task = self.loop.create_task(coro) - if task.done(): - task = None - else: - self._writer = task - else: - # We have nothing to write because - # - there is no body - # - the protocol does not have writing paused - # - we are not waiting for a 100-continue response - protocol.start_timeout() - writer.set_eof() - task = None - response_class = self.response_class - assert response_class is not None - self.response = response_class( - self.method, - self.original_url, - writer=task, - continue100=self._continue, - timer=self._timer, - request_info=self.request_info, - traces=self._traces, - loop=self.loop, - session=self._session, + @property + def request_info(self) -> RequestInfo: + headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers) + # These are created on every request, so we use a NamedTuple + # for performance reasons. We don't use the RequestInfo.__new__ + # method because it has a different signature which is provided + # for backwards compatibility only. 
+ return tuple.__new__( + RequestInfo, (self.url, self.method, headers, self.original_url) ) - return self.response - async def close(self) -> None: - if self.__writer is not None: - try: - await self.__writer - except asyncio.CancelledError: - if ( - sys.version_info >= (3, 11) - and (task := asyncio.current_task()) - and task.cancelling() - ): - raise + @property + def session(self) -> "ClientSession": + """Return the ClientSession instance. - def terminate(self) -> None: - if self.__writer is not None: - if not self.loop.is_closed(): - self.__writer.cancel() - self.__writer.remove_done_callback(self.__reset_writer) - self.__writer = None + This property provides access to the ClientSession that initiated + this request, allowing middleware to make additional requests + using the same session. + """ + return self._session - async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None: - for trace in self._traces: - await trace.send_request_chunk_sent(method, url, chunk) + def update_host(self, url: URL) -> None: + """Update destination host, port and connection type (ssl).""" + # get host/port + if not url.raw_host: + raise InvalidURL(url) - async def _on_headers_request_sent( - self, method: str, url: URL, headers: "CIMultiDict[str]" - ) -> None: - for trace in self._traces: - await trace.send_request_headers(method, url, headers) + # basic auth info + if url.raw_user or url.raw_password: + self.auth = helpers.BasicAuth(url.user or "", url.password or "") + def update_version(self, version: Union[http.HttpVersion, str]) -> None: + """Convert request version to two elements tuple. 
-_CONNECTION_CLOSED_EXCEPTION = ClientConnectionError("Connection closed") + parser HTTP version '1.1' => (1, 1) + """ + if isinstance(version, str): + v = [part.strip() for part in version.split(".", 1)] + try: + version = http.HttpVersion(int(v[0]), int(v[1])) + except ValueError: + raise ValueError( + f"Can not parse http version number: {version}" + ) from None + self.version = version + def update_headers(self, headers: Optional[LooseHeaders]) -> None: + """Update request headers.""" + self.headers: CIMultiDict[str] = CIMultiDict() -class ClientResponse(HeadersMixin): - # Some of these attributes are None when created, - # but will be set by the start() method. - # As the end user will likely never see the None values, we cheat the types below. - # from the Status-Line of the response - version: Optional[HttpVersion] = None # HTTP-Version - status: int = None # type: ignore[assignment] # Status-Code - reason: Optional[str] = None # Reason-Phrase + # Build the host header + host = self.url.host_port_subcomponent - content: StreamReader = None # type: ignore[assignment] # Payload stream - _body: Optional[bytes] = None - _headers: CIMultiDictProxy[str] = None # type: ignore[assignment] - _history: Tuple["ClientResponse", ...] = () - _raw_headers: RawHeaders = None # type: ignore[assignment] + # host_port_subcomponent is None when the URL is a relative URL. + # but we know we do not have a relative URL here. 
+ assert host is not None + self.headers[hdrs.HOST] = host - _connection: Optional["Connection"] = None # current connection - _cookies: Optional[SimpleCookie] = None - _continue: Optional["asyncio.Future[bool]"] = None - _source_traceback: Optional[traceback.StackSummary] = None - _session: Optional["ClientSession"] = None - # set up by ClientRequest after ClientResponse object creation - # post-init stage allows to not change ctor signature - _closed = True # to allow __del__ for non-initialized properly response - _released = False - _in_context = False + if not headers: + return + + if isinstance(headers, (dict, MultiDictProxy, MultiDict)): + headers = headers.items() - _resolve_charset: Callable[["ClientResponse", bytes], str] = lambda *_: "utf-8" + for key, value in headers: # type: ignore[misc] + # A special case for Host header + if key in hdrs.HOST_ALL: + self.headers[key] = value + else: + self.headers.add(key, value) - __writer: Optional["asyncio.Task[None]"] = None + def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None: + if skip_auto_headers is not None: + self._skip_auto_headers = CIMultiDict( + (hdr, None) for hdr in sorted(skip_auto_headers) + ) + used_headers = self.headers.copy() + used_headers.extend(self._skip_auto_headers) # type: ignore[arg-type] + else: + # Fast path when there are no headers to skip + # which is the most common case. + used_headers = self.headers - def __init__( - self, - method: str, - url: URL, - *, - writer: "Optional[asyncio.Task[None]]", - continue100: Optional["asyncio.Future[bool]"], - timer: Optional[BaseTimerContext], - request_info: RequestInfo, - traces: List["Trace"], - loop: asyncio.AbstractEventLoop, - session: "ClientSession", - ) -> None: - # URL forbids subclasses, so a simple type check is enough. 
- assert type(url) is URL + for hdr, val in self.DEFAULT_HEADERS.items(): + if hdr not in used_headers: + self.headers[hdr] = val - self.method = method + if hdrs.USER_AGENT not in used_headers: + self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE - self._real_url = url - self._url = url.with_fragment(None) if url.raw_fragment else url - if writer is not None: - self._writer = writer - if continue100 is not None: - self._continue = continue100 - self._request_info = request_info - self._timer = timer if timer is not None else TimerNoop() - self._cache: Dict[str, Any] = {} - self._traces = traces - self._loop = loop - # Save reference to _resolve_charset, so that get_encoding() will still - # work after the response has finished reading the body. - # TODO: Fix session=None in tests (see ClientRequest.__init__). - if session is not None: - # store a reference to session #1985 - self._session = session - self._resolve_charset = session._resolve_charset - if loop.get_debug(): - self._source_traceback = traceback.extract_stack(sys._getframe(1)) + def update_cookies(self, cookies: Optional[LooseCookies]) -> None: + """Update request cookies header.""" + if not cookies: + return - def __reset_writer(self, _: object = None) -> None: - self.__writer = None + c = SimpleCookie() + if hdrs.COOKIE in self.headers: + c.load(self.headers.get(hdrs.COOKIE, "")) + del self.headers[hdrs.COOKIE] - @property - def _writer(self) -> Optional["asyncio.Task[None]"]: - """The writer task for streaming data. + if isinstance(cookies, Mapping): + iter_cookies = cookies.items() + else: + iter_cookies = cookies # type: ignore[assignment] + for name, value in iter_cookies: + if isinstance(value, Morsel): + # Preserve coded_value + mrsl_val = value.get(value.key, Morsel()) + mrsl_val.set(value.key, value.value, value.coded_value) + c[name] = mrsl_val + else: + c[name] = value # type: ignore[assignment] - _writer is only provided for backwards compatibility - for subclasses that may need to access it. 
- """ - return self.__writer + self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip() - @_writer.setter - def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None: - """Set the writer task for streaming data.""" - if self.__writer is not None: - self.__writer.remove_done_callback(self.__reset_writer) - self.__writer = writer - if writer is None: + def update_content_encoding(self, data: Any, compress: Union[bool, str]) -> None: + """Set request content encoding.""" + self.compress = None + if not data: return - if writer.done(): - # The writer is already done, so we can clear it immediately. - self.__writer = None - else: - writer.add_done_callback(self.__reset_writer) - @property - def cookies(self) -> SimpleCookie: - if self._cookies is None: - self._cookies = SimpleCookie() - return self._cookies + if self.headers.get(hdrs.CONTENT_ENCODING): + if compress: + raise ValueError( + "compress can not be set if Content-Encoding header is set" + ) + elif compress: + self.compress = compress if isinstance(compress, str) else "deflate" + self.headers[hdrs.CONTENT_ENCODING] = self.compress + self.chunked = True # enable chunked, no need to deal with length - @cookies.setter - def cookies(self, cookies: SimpleCookie) -> None: - self._cookies = cookies + def update_transfer_encoding(self) -> None: + """Analyze transfer-encoding header.""" + te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower() - @reify - def url(self) -> URL: - return self._url + if "chunked" in te: + if self.chunked: + raise ValueError( + "chunked can not be set " + 'if "Transfer-Encoding: chunked" header is set' + ) - @reify - def real_url(self) -> URL: - return self._real_url + elif self.chunked: + if hdrs.CONTENT_LENGTH in self.headers: + raise ValueError( + "chunked can not be set if Content-Length header is set" + ) - @reify - def host(self) -> str: - assert self._url.host is not None - return self._url.host + self.headers[hdrs.TRANSFER_ENCODING] = "chunked" + elif ( + 
self._body is not None + and hdrs.CONTENT_LENGTH not in self.headers + and (size := self._body.size) is not None + ): + self.headers[hdrs.CONTENT_LENGTH] = str(size) - @reify - def headers(self) -> "CIMultiDictProxy[str]": - return self._headers + def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None: + """Set basic auth.""" + if auth is None: + auth = self.auth + if auth is None and trust_env and self.url.host is not None: + netrc_obj = netrc_from_env() + with contextlib.suppress(LookupError): + auth = basicauth_from_netrc(netrc_obj, self.url.host) + if auth is None: + return - @reify - def raw_headers(self) -> RawHeaders: - return self._raw_headers + if not isinstance(auth, helpers.BasicAuth): + raise TypeError("BasicAuth() tuple is required instead") - @reify - def request_info(self) -> RequestInfo: - return self._request_info + self.headers[hdrs.AUTHORIZATION] = auth.encode() - @reify - def content_disposition(self) -> Optional[ContentDisposition]: - raw = self._headers.get(hdrs.CONTENT_DISPOSITION) - if raw is None: - return None - disposition_type, params_dct = multipart.parse_content_disposition(raw) - params = MappingProxyType(params_dct) - filename = multipart.content_disposition_filename(params) - return ContentDisposition(disposition_type, params, filename) + def update_body_from_data(self, body: Any, _stacklevel: int = 3) -> None: + """Update request body from data.""" + if self._body is not None: + _warn_if_unclosed_payload(self._body, stacklevel=_stacklevel) - def __del__(self, _warnings: Any = warnings) -> None: - if self._closed: + if body is None: + self._body = None return - if self._connection is not None: - self._connection.release() - self._cleanup_writer() + # FormData + maybe_payload = body() if isinstance(body, FormData) else body - if self._loop.get_debug(): - _warnings.warn( - f"Unclosed response {self!r}", ResourceWarning, source=self + try: + body_payload = payload.PAYLOAD_REGISTRY.get(maybe_payload, 
disposition=None) + except payload.LookupError: + boundary: Optional[str] = None + if CONTENT_TYPE in self.headers: + boundary = parse_mimetype(self.headers[CONTENT_TYPE]).parameters.get( + "boundary" ) - context = {"client_response": self, "message": "Unclosed response"} - if self._source_traceback: - context["source_traceback"] = self._source_traceback - self._loop.call_exception_handler(context) + body_payload = FormData(maybe_payload, boundary=boundary)() # type: ignore[arg-type] - def __repr__(self) -> str: - out = io.StringIO() - ascii_encodable_url = str(self.url) - if self.reason: - ascii_encodable_reason = self.reason.encode( - "ascii", "backslashreplace" - ).decode("ascii") - else: - ascii_encodable_reason = "None" - print( - "".format( - ascii_encodable_url, self.status, ascii_encodable_reason - ), - file=out, - ) - print(self.headers, file=out) - return out.getvalue() + self._body = body_payload + # enable chunked encoding if needed + if not self.chunked and hdrs.CONTENT_LENGTH not in self.headers: + if (size := body_payload.size) is not None: + self.headers[hdrs.CONTENT_LENGTH] = str(size) + else: + self.chunked = True + + # copy payload headers + assert body_payload.headers + headers = self.headers + skip_headers = self._skip_auto_headers + for key, value in body_payload.headers.items(): + if key in headers or (skip_headers is not None and key in skip_headers): + continue + headers[key] = value + + def _update_body(self, body: Any) -> None: + """Update request body after its already been set.""" + # Remove existing Content-Length header since body is changing + if hdrs.CONTENT_LENGTH in self.headers: + del self.headers[hdrs.CONTENT_LENGTH] + + # Remove existing Transfer-Encoding header to avoid conflicts + if self.chunked and hdrs.TRANSFER_ENCODING in self.headers: + del self.headers[hdrs.TRANSFER_ENCODING] - @property - def connection(self) -> Optional["Connection"]: - return self._connection + # Now update the body using the existing method + # 
Called from _update_body, add 1 to stacklevel from caller + self.update_body_from_data(body, _stacklevel=4) - @reify - def history(self) -> Tuple["ClientResponse", ...]: - """A sequence of responses, if redirects occurred.""" - return self._history + # Update transfer encoding headers if needed (same logic as __init__) + if body is not None or self.method not in self.GET_METHODS: + self.update_transfer_encoding() - @reify - def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]": - links_str = ", ".join(self.headers.getall("link", [])) + async def update_body(self, body: Any) -> None: + """ + Update request body and close previous payload if needed. - if not links_str: - return MultiDictProxy(MultiDict()) + This method safely updates the request body by first closing any existing + payload to prevent resource leaks, then setting the new body. - links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict() + IMPORTANT: Always use this method instead of setting request.body directly. + Direct assignment to request.body will leak resources if the previous body + contains file handles, streams, or other resources that need cleanup. - for val in re.split(r",(?=\s*<)", links_str): - match = re.match(r"\s*<(.*)>(.*)", val) - if match is None: # Malformed link - continue - url, params_str = match.groups() - params = params_str.split(";")[1:] + Args: + body: The new body content. 
Can be: + - bytes/bytearray: Raw binary data + - str: Text data (will be encoded using charset from Content-Type) + - FormData: Form data that will be encoded as multipart/form-data + - Payload: A pre-configured payload object + - AsyncIterable: An async iterable of bytes chunks + - File-like object: Will be read and sent as binary data + - None: Clears the body - link: MultiDict[Union[str, URL]] = MultiDict() + Usage: + # CORRECT: Use update_body + await request.update_body(b"new request data") - for param in params: - match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M) - if match is None: # Malformed param - continue - key, _, value, _ = match.groups() + # WRONG: Don't set body directly + # request.body = b"new request data" # This will leak resources! - link.add(key, value) + # Update with form data + form_data = FormData() + form_data.add_field('field', 'value') + await request.update_body(form_data) - key = link.get("rel", url) + # Clear body + await request.update_body(None) - link.add("url", self.url.join(URL(url))) + Note: + This method is async because it may need to close file handles or + other resources associated with the previous payload. Always await + this method to ensure proper cleanup. - links.add(str(key), MultiDictProxy(link)) + Warning: + Setting request.body directly is highly discouraged and can lead to: + - Resource leaks (unclosed file handles, streams) + - Memory leaks (unreleased buffers) + - Unexpected behavior with streaming payloads - return MultiDictProxy(links) + It is not recommended to change the payload type in middleware. If the + body was already set (e.g., as bytes), it's best to keep the same type + rather than converting it (e.g., to str) as this may result in unexpected + behavior. 
- async def start(self, connection: "Connection") -> "ClientResponse": - """Start response processing.""" - self._closed = False - self._protocol = connection.protocol - self._connection = connection + See Also: + - update_body_from_data: Synchronous body update without cleanup + - body property: Direct body access (STRONGLY DISCOURAGED) - with self._timer: - while True: - # read response - try: - protocol = self._protocol - message, payload = await protocol.read() # type: ignore[union-attr] - except http.HttpProcessingError as exc: - raise ClientResponseError( - self.request_info, - self.history, - status=exc.code, - message=exc.message, - headers=exc.headers, - ) from exc + """ + # Close existing payload if it exists and needs closing + if self._body is not None: + await self._body.close() + self._update_body(body) - if message.code < 100 or message.code > 199 or message.code == 101: - break + def update_expect_continue(self, expect: bool = False) -> None: + if expect: + self.headers[hdrs.EXPECT] = "100-continue" + elif ( + hdrs.EXPECT in self.headers + and self.headers[hdrs.EXPECT].lower() == "100-continue" + ): + expect = True - if self._continue is not None: - set_result(self._continue, True) - self._continue = None + if expect: + self._continue = self.loop.create_future() - # payload eof handler - payload.on_eof(self._response_eof) + def update_proxy( + self, + proxy: Optional[URL], + proxy_auth: Optional[BasicAuth], + proxy_headers: Optional[LooseHeaders], + ) -> None: + self.proxy = proxy + if proxy is None: + self.proxy_auth = None + self.proxy_headers = None + return - # response status - self.version = message.version - self.status = message.code - self.reason = message.reason + if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth): + raise ValueError("proxy_auth must be None or BasicAuth() tuple") + self.proxy_auth = proxy_auth - # headers - self._headers = message.headers # type is CIMultiDictProxy - self._raw_headers = message.raw_headers # 
type is Tuple[bytes, bytes] + if proxy_headers is not None and not isinstance( + proxy_headers, (MultiDict, MultiDictProxy) + ): + proxy_headers = CIMultiDict(proxy_headers) + self.proxy_headers = proxy_headers - # payload - self.content = payload + async def write_bytes( + self, + writer: AbstractStreamWriter, + conn: "Connection", + content_length: Optional[int], + ) -> None: + """ + Write the request body to the connection stream. - # cookies - if cookie_hdrs := self.headers.getall(hdrs.SET_COOKIE, ()): - cookies = SimpleCookie() - for hdr in cookie_hdrs: - try: - cookies.load(hdr) - except CookieError as exc: - client_logger.warning("Can not load response cookies: %s", exc) - self._cookies = cookies - return self + This method handles writing different types of request bodies: + 1. Payload objects (using their specialized write_with_length method) + 2. Bytes/bytearray objects + 3. Iterable body content - def _response_eof(self) -> None: - if self._closed: - return + Args: + writer: The stream writer to write the body to + conn: The connection being used for this request + content_length: Optional maximum number of bytes to write from the body + (None means write the entire body) - # protocol could be None because connection could be detached - protocol = self._connection and self._connection.protocol - if protocol is not None and protocol.upgraded: - return + The method properly handles: + - Waiting for 100-Continue responses if required + - Content length constraints for chunked encoding + - Error handling for network issues, cancellation, and other exceptions + - Signaling EOF and timeout management - self._closed = True - self._cleanup_writer() - self._release_connection() + Raises: + ClientOSError: When there's an OS-level error writing the body + ClientConnectionError: When there's a general connection error + asyncio.CancelledError: When the operation is cancelled - @property - def closed(self) -> bool: - return self._closed + """ + # 100 response + if 
self._continue is not None: + # Force headers to be sent before waiting for 100-continue + writer.send_headers() + await writer.drain() + await self._continue - def close(self) -> None: - if not self._released: - self._notify_content() + protocol = conn.protocol + assert protocol is not None + try: + # This should be a rare case but the + # self._body can be set to None while + # the task is being started or we wait above + # for the 100-continue response. + # The more likely case is we have an empty + # payload, but 100-continue is still expected. + if self._body is not None: + await self._body.write_with_length(writer, content_length) + except OSError as underlying_exc: + reraised_exc = underlying_exc - self._closed = True - if self._loop.is_closed(): - return + # Distinguish between timeout and other OS errors for better error reporting + exc_is_not_timeout = underlying_exc.errno is not None or not isinstance( + underlying_exc, asyncio.TimeoutError + ) + if exc_is_not_timeout: + reraised_exc = ClientOSError( + underlying_exc.errno, + f"Can not write request body for {self.url !s}", + ) - self._cleanup_writer() - if self._connection is not None: - self._connection.close() - self._connection = None + set_exception(protocol, reraised_exc, underlying_exc) + except asyncio.CancelledError: + # Body hasn't been fully sent, so connection can't be reused + conn.close() + raise + except Exception as underlying_exc: + set_exception( + protocol, + ClientConnectionError( + "Failed to send bytes into the underlying connection " + f"{conn !s}: {underlying_exc!r}", + ), + underlying_exc, + ) + else: + # Successfully wrote the body, signal EOF and start response timeout + await writer.write_eof() + protocol.start_timeout() - def release(self) -> None: - if not self._released: - self._notify_content() + async def send(self, conn: "Connection") -> "ClientResponse": + # Specify request target: + # - CONNECT request must send authority form URI + # - not CONNECT proxy must send 
absolute form URI + # - most common is origin form URI + if self.method == hdrs.METH_CONNECT: + connect_host = self.url.host_subcomponent + assert connect_host is not None + path = f"{connect_host}:{self.url.port}" + elif self.proxy and not self.is_ssl(): + path = str(self.url) + else: + path = self.url.raw_path_qs - self._closed = True + protocol = conn.protocol + assert protocol is not None + writer = StreamWriter( + protocol, + self.loop, + on_chunk_sent=( + functools.partial(self._on_chunk_request_sent, self.method, self.url) + if self._traces + else None + ), + on_headers_sent=( + functools.partial(self._on_headers_request_sent, self.method, self.url) + if self._traces + else None + ), + ) - self._cleanup_writer() - self._release_connection() + if self.compress: + writer.enable_compression(self.compress) - @property - def ok(self) -> bool: - """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not. + if self.chunked is not None: + writer.enable_chunking() - This is **not** a check for ``200 OK`` but a check that the response - status is under 400. - """ - return 400 > self.status + # set default content-type + if ( + self.method in self.POST_METHODS + and ( + self._skip_auto_headers is None + or hdrs.CONTENT_TYPE not in self._skip_auto_headers + ) + and hdrs.CONTENT_TYPE not in self.headers + ): + self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream" - def raise_for_status(self) -> None: - if not self.ok: - # reason should always be not None for a started response - assert self.reason is not None + v = self.version + if hdrs.CONNECTION not in self.headers: + if conn._connector.force_close: + if v == HttpVersion11: + self.headers[hdrs.CONNECTION] = "close" + elif v == HttpVersion10: + self.headers[hdrs.CONNECTION] = "keep-alive" - # If we're in a context we can rely on __aexit__() to release as the - # exception propagates. 
- if not self._in_context: - self.release() + # status + headers + status_line = f"{self.method} {path} HTTP/{v.major}.{v.minor}" - raise ClientResponseError( - self.request_info, - self.history, - status=self.status, - message=self.reason, - headers=self.headers, - ) + # Buffer headers for potential coalescing with body + await writer.write_headers(status_line, self.headers) - def _release_connection(self) -> None: - if self._connection is not None: - if self.__writer is None: - self._connection.release() - self._connection = None + task: Optional["asyncio.Task[None]"] + if self._body or self._continue is not None or protocol.writing_paused: + coro = self.write_bytes(writer, conn, self._get_content_length()) + if sys.version_info >= (3, 12): + # Optimization for Python 3.12, try to write + # bytes immediately to avoid having to schedule + # the task on the event loop. + task = asyncio.Task(coro, loop=self.loop, eager_start=True) else: - self.__writer.add_done_callback(lambda f: self._release_connection()) + task = self.loop.create_task(coro) + if task.done(): + task = None + else: + self._writer = task + else: + # We have nothing to write because + # - there is no body + # - the protocol does not have writing paused + # - we are not waiting for a 100-continue response + protocol.start_timeout() + writer.set_eof() + task = None + response_class = self.response_class + assert response_class is not None + self.response = response_class( + self.method, + self.original_url, + writer=task, + continue100=self._continue, + timer=self._timer, + request_info=self.request_info, + traces=self._traces, + loop=self.loop, + session=self._session, + ) + return self.response - async def _wait_released(self) -> None: + async def close(self) -> None: if self.__writer is not None: try: await self.__writer @@ -1318,126 +1423,20 @@ async def _wait_released(self) -> None: and task.cancelling() ): raise - self._release_connection() - - def _cleanup_writer(self) -> None: - if 
self.__writer is not None: - self.__writer.cancel() - self._session = None - - def _notify_content(self) -> None: - content = self.content - # content can be None here, but the types are cheated elsewhere. - if content and content.exception() is None: # type: ignore[truthy-bool] - set_exception(content, _CONNECTION_CLOSED_EXCEPTION) - self._released = True - async def wait_for_close(self) -> None: + def terminate(self) -> None: if self.__writer is not None: - try: - await self.__writer - except asyncio.CancelledError: - if ( - sys.version_info >= (3, 11) - and (task := asyncio.current_task()) - and task.cancelling() - ): - raise - self.release() - - async def read(self) -> bytes: - """Read response payload.""" - if self._body is None: - try: - self._body = await self.content.read() - for trace in self._traces: - await trace.send_response_chunk_received( - self.method, self.url, self._body - ) - except BaseException: - self.close() - raise - elif self._released: # Response explicitly released - raise ClientConnectionError("Connection closed") - - protocol = self._connection and self._connection.protocol - if protocol is None or not protocol.upgraded: - await self._wait_released() # Underlying connection released - return self._body - - def get_encoding(self) -> str: - ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower() - mimetype = helpers.parse_mimetype(ctype) - - encoding = mimetype.parameters.get("charset") - if encoding: - with contextlib.suppress(LookupError, ValueError): - return codecs.lookup(encoding).name - - if mimetype.type == "application" and ( - mimetype.subtype == "json" or mimetype.subtype == "rdap" - ): - # RFC 7159 states that the default encoding is UTF-8. 
- # RFC 7483 defines application/rdap+json - return "utf-8" - - if self._body is None: - raise RuntimeError( - "Cannot compute fallback encoding of a not yet read body" - ) - - return self._resolve_charset(self, self._body) - - async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str: - """Read response payload and decode.""" - await self.read() - - if encoding is None: - encoding = self.get_encoding() - - return self._body.decode(encoding, errors=errors) # type: ignore[union-attr] - - async def json( - self, - *, - encoding: Optional[str] = None, - loads: JSONDecoder = DEFAULT_JSON_DECODER, - content_type: Optional[str] = "application/json", - ) -> Any: - """Read and decodes JSON response.""" - await self.read() - - if content_type: - if not is_expected_content_type(self.content_type, content_type): - raise ContentTypeError( - self.request_info, - self.history, - status=self.status, - message=( - "Attempt to decode JSON with " - "unexpected mimetype: %s" % self.content_type - ), - headers=self.headers, - ) - - if encoding is None: - encoding = self.get_encoding() - - return loads(self._body.decode(encoding)) # type: ignore[union-attr] + if not self.loop.is_closed(): + self.__writer.cancel() + self.__writer.remove_done_callback(self.__reset_writer) + self.__writer = None - async def __aenter__(self) -> "ClientResponse": - self._in_context = True - return self + async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None: + for trace in self._traces: + await trace.send_request_chunk_sent(method, url, chunk) - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], + async def _on_headers_request_sent( + self, method: str, url: URL, headers: "CIMultiDict[str]" ) -> None: - self._in_context = False - # similar to _RequestContextManager, we do not need to check - # for exceptions, response object can close connection - # if state is 
broken - self.release() - await self.wait_for_close() + for trace in self._traces: + await trace.send_request_headers(method, url, headers) diff --git a/docs/client_reference.rst b/docs/client_reference.rst index b08df9c05ba..287eba0e89d 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -1866,12 +1866,9 @@ ClientRequest For more information about using middleware, see :ref:`aiohttp-client-middleware`. .. attribute:: body - :type: Payload | FormData + :type: Payload | Literal[b""] - The request body payload. This can be: - - - A :class:`Payload` object for raw data (default is empty bytes ``b""``) - - A :class:`FormData` object for form submissions + The request body payload (defaults to ``b""`` if no body passed). .. danger:: diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 052df257fe8..6fbf2c61a5a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -56,7 +56,7 @@ coverage==7.8.2 # via # -r requirements/test.in # pytest-cov -cryptography==45.0.2 +cryptography==45.0.3 # via # pyjwt # trustme diff --git a/requirements/dev.txt b/requirements/dev.txt index 1f00948a5d0..78a718c65be 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -56,7 +56,7 @@ coverage==7.8.2 # via # -r requirements/test.in # pytest-cov -cryptography==45.0.2 +cryptography==45.0.3 # via # pyjwt # trustme diff --git a/requirements/lint.txt b/requirements/lint.txt index 77df1c8ade8..a2917ec48e0 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -21,7 +21,7 @@ cfgv==3.4.0 # via pre-commit click==8.1.8 # via slotscheck -cryptography==45.0.2 +cryptography==45.0.3 # via trustme distlib==0.3.9 # via virtualenv diff --git a/requirements/test.txt b/requirements/test.txt index db685044f69..fa449a9de51 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -29,7 +29,7 @@ coverage==7.8.2 # via # -r requirements/test.in # pytest-cov -cryptography==45.0.2 +cryptography==45.0.3 # via trustme 
exceptiongroup==1.3.0 # via pytest diff --git a/tests/test_client_middleware_digest_auth.py b/tests/test_client_middleware_digest_auth.py index 6da6850bafc..b649e0b601f 100644 --- a/tests/test_client_middleware_digest_auth.py +++ b/tests/test_client_middleware_digest_auth.py @@ -2,7 +2,7 @@ import io from hashlib import md5, sha1 -from typing import Generator, Union +from typing import Generator, Literal, Union from unittest import mock import pytest @@ -270,7 +270,7 @@ def KD(secret: str, data: str) -> str: @pytest.mark.parametrize( ("body", "body_str"), [ - (b"this is a body", "this is a body"), # Bytes case + (b"", ""), # Bytes case ( BytesIOPayload(io.BytesIO(b"this is a body")), "this is a body", @@ -280,7 +280,7 @@ def KD(secret: str, data: str) -> str: async def test_digest_response_exact_match( qop: str, algorithm: str, - body: Union[bytes, BytesIOPayload], + body: Union[Literal[b""], BytesIOPayload], body_str: str, mock_sha1_digest: mock.MagicMock, ) -> None: diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 361163c87a0..f736bd0e224 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -37,6 +37,7 @@ from aiohttp.compression_utils import ZLibBackend from aiohttp.connector import Connection from aiohttp.http import HttpVersion10, HttpVersion11, StreamWriter +from aiohttp.multipart import MultipartWriter from aiohttp.typedefs import LooseCookies @@ -757,7 +758,8 @@ async def test_formdata_boundary_from_headers( ) async with await req.send(conn): await asyncio.sleep(0) - assert req.body._boundary == boundary.encode() # type: ignore[union-attr] + assert isinstance(req.body, MultipartWriter) + assert req.body._boundary == boundary.encode() async def test_post_data(loop: asyncio.AbstractEventLoop, conn: mock.Mock) -> None: @@ -767,7 +769,8 @@ async def test_post_data(loop: asyncio.AbstractEventLoop, conn: mock.Mock) -> No ) resp = await req.send(conn) assert "/" == req.url.path - assert b"life=42" == 
req.body._value # type: ignore[union-attr] + assert isinstance(req.body, payload.Payload) + assert b"life=42" == req.body._value assert "application/x-www-form-urlencoded" == req.headers["CONTENT-TYPE"] await req.close() resp.close() @@ -806,7 +809,8 @@ async def test_get_with_data(loop: asyncio.AbstractEventLoop) -> None: meth, URL("http://python.org/"), data={"life": "42"}, loop=loop ) assert "/" == req.url.path - assert b"life=42" == req.body._value # type: ignore[union-attr] + assert isinstance(req.body, payload.Payload) + assert b"life=42" == req.body._value await req.close() @@ -1320,7 +1324,7 @@ async def test_oserror_on_write_bytes( loop: asyncio.AbstractEventLoop, conn: mock.Mock ) -> None: req = ClientRequest("POST", URL("http://python.org/"), loop=loop) - req.body = b"test data" + req.body = b"test data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892 writer = WriterMock() writer.write.side_effect = OSError @@ -1668,7 +1672,7 @@ async def test_write_bytes_with_content_length_limit( data = b"Hello World" req = ClientRequest("post", URL("http://python.org/"), loop=loop) - req.body = data + req.body = data # type: ignore[assignment] # https://github.com/python/mypy/issues/12892 writer = StreamWriter(protocol=conn.protocol, loop=loop) # Use content_length=5 to truncate data @@ -1705,7 +1709,7 @@ async def gen() -> AsyncIterator[bytes]: req.body = gen() # type: ignore[assignment] # https://github.com/python/mypy/issues/12892 else: - req.body = data + req.body = data # type: ignore[assignment] # https://github.com/python/mypy/issues/12892 writer = StreamWriter(protocol=conn.protocol, loop=loop) # Use content_length=7 to truncate at the middle of Part2 @@ -1755,7 +1759,7 @@ async def test_warn_if_unclosed_payload_via_body_setter( ResourceWarning, match="The previous request body contains unclosed resources", ): - req.body = b"new data" + req.body = b"new data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892 
await req.close() @@ -1773,7 +1777,7 @@ async def test_no_warn_for_autoclose_payload_via_body_setter( # Setting body again should not trigger warning since previous payload has autoclose=True with warnings.catch_warnings(record=True) as warning_list: warnings.simplefilter("always") - req.body = b"new data" + req.body = b"new data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892 # Filter out any non-ResourceWarning warnings resource_warnings = [ @@ -1803,7 +1807,7 @@ async def test_no_warn_for_consumed_payload_via_body_setter( # Setting body again should not trigger warning since previous payload is consumed with warnings.catch_warnings(record=True) as warning_list: warnings.simplefilter("always") - req.body = b"new data" + req.body = b"new data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892 # Filter out any non-ResourceWarning warnings resource_warnings = [ @@ -1922,7 +1926,7 @@ async def test_body_setter_closes_previous_payload( req._body = mock_payload # Update body with new data using setter - req.body = b"new body data" + req.body = b"new body data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892 # Verify the previous payload was closed using _close mock_payload._close.assert_called_once() @@ -2051,7 +2055,7 @@ async def test_warn_stacklevel_points_to_user_code( with warnings.catch_warnings(record=True) as warning_list: warnings.simplefilter("always", ResourceWarning) # This line should be reported as the warning source - req.body = b"new data" # LINE TO BE REPORTED + req.body = b"new data" # type: ignore[assignment] # https://github.com/python/mypy/issues/12892 # LINE TO BE REPORTED # Find the ResourceWarning resource_warnings = [