From e5628523712527785993978e5475127c7958b9bc Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Mon, 6 Oct 2025 23:20:17 +0100 Subject: [PATCH 01/51] Bump version with .dev0 (#11582) --- aiohttp/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 92a7adc59cb..443b05ff275 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.13.0" +__version__ = "3.13.1.dev0" from typing import TYPE_CHECKING, Tuple From 19e9b9f69fff3fd0adf268c9cd880f74540a1e25 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Tue, 7 Oct 2025 00:44:20 +0100 Subject: [PATCH 02/51] [PR #11585/76e54a32 backport][3.14] Move Dependabot to 3.14 (#11586) **This is a backport of PR #11585 as merged into master (76e54a32dc03d37ccb6ce3a6438ea8aca6798867).** Co-authored-by: Sam Bull --- .github/dependabot.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 39cde1e004a..8b56354f345 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -25,7 +25,7 @@ updates: directory: "/" labels: - dependencies - target-branch: "3.13" + target-branch: "3.14" schedule: interval: "daily" open-pull-requests-limit: 10 @@ -37,7 +37,7 @@ updates: - dependency-type: "all" labels: - dependencies - target-branch: "3.13" + target-branch: "3.14" schedule: interval: "daily" open-pull-requests-limit: 10 From b51696b0b3286d0d347244e047dbbdb51ba066cd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 7 Oct 2025 00:02:46 +0000 Subject: [PATCH 03/51] Bump multidict from 6.6.4 to 6.7.0 (#11587) Bumps [multidict](https://github.com/aio-libs/multidict) from 6.6.4 to 6.7.0.
Release notes

Sourced from multidict's releases.

6.7.0

Contributor-facing changes

  • Updated tests and added CI for CPython 3.14 -- by :user:kumaraditya303.

    Related issues and pull requests on GitHub: #1235.


Changelog

Sourced from multidict's changelog.

6.7.0

(2025-10-05)

Contributor-facing changes

  • Updated tests and added CI for CPython 3.14 -- by :user:kumaraditya303.

    Related issues and pull requests on GitHub: :issue:1235.


Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=multidict&package-manager=pip&previous-version=6.6.4&new-version=6.7.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base-ft.txt | 2 +- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/cython.txt | 2 +- requirements/dev.txt | 2 +- requirements/multidict.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 9 files changed, 9 insertions(+), 9 deletions(-) diff --git a/requirements/base-ft.txt b/requirements/base-ft.txt index 08de9013dc4..b5589a5ae93 100644 --- a/requirements/base-ft.txt +++ b/requirements/base-ft.txt @@ -26,7 +26,7 @@ gunicorn==23.0.0 # via -r requirements/base-ft.in idna==3.10 # via yarl -multidict==6.6.4 +multidict==6.7.0 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/base.txt b/requirements/base.txt index 3b9af1959e8..d62506eff63 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -26,7 +26,7 @@ gunicorn==23.0.0 # via -r requirements/base.in idna==3.4 # via yarl -multidict==6.6.4 +multidict==6.7.0 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/constraints.txt b/requirements/constraints.txt index d74296e4dc5..1f34510fa70 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -113,7 +113,7 @@ markupsafe==3.0.3 # via jinja2 mdurl==0.1.2 # via markdown-it-py -multidict==6.6.4 +multidict==6.7.0 # via # -r requirements/multidict.in # -r requirements/runtime-deps.in diff --git a/requirements/cython.txt b/requirements/cython.txt index 824e216600b..9f3e87f8ea7 100644 --- a/requirements/cython.txt +++ b/requirements/cython.txt @@ -6,7 +6,7 @@ # cython==3.1.4 # via -r requirements/cython.in -multidict==6.6.4 +multidict==6.7.0 # via -r requirements/multidict.in typing-extensions==4.15.0 # via multidict diff --git a/requirements/dev.txt b/requirements/dev.txt index 4f67db4f3e1..8cb5c8af18e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -111,7 +111,7 @@ markupsafe==3.0.3 # via jinja2 mdurl==0.1.2 # via markdown-it-py -multidict==6.6.4 +multidict==6.7.0 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/multidict.txt b/requirements/multidict.txt index 04a7f1fc117..39d1c2a3c8e 100644 --- a/requirements/multidict.txt +++ b/requirements/multidict.txt @@ -4,7 +4,7 @@ # # pip-compile --allow-unsafe --output-file=requirements/multidict.txt --resolver=backtracking --strip-extras requirements/multidict.in # -multidict==6.6.4 +multidict==6.7.0 # via -r requirements/multidict.in typing-extensions==4.15.0 # via multidict diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 727745a5617..9c5be187f73 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -24,7 +24,7 @@ frozenlist==1.8.0 # aiosignal idna==3.10 # via yarl -multidict==6.6.4 +multidict==6.7.0 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 3e2a7fa1d59..85155a18b8f 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -59,7 +59,7 @@ markdown-it-py==4.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -multidict==6.6.4 +multidict==6.7.0 # via # -r requirements/runtime-deps.in # yarl diff --git a/requirements/test.txt b/requirements/test.txt index 3d356ab6c7f..f83d85b0f5b 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -59,7 +59,7 @@ markdown-it-py==3.0.0 # via rich mdurl==0.1.2 # via markdown-it-py -multidict==6.6.4 +multidict==6.7.0 # via # -r 
requirements/runtime-deps.in # yarl From ae295b801cfe5f70e81f8437d5d38903b70b050e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 7 Oct 2025 00:50:59 +0000 Subject: [PATCH 04/51] Bump attrs from 25.3.0 to 25.4.0 (#11598) Bumps [attrs](https://github.com/sponsors/hynek) from 25.3.0 to 25.4.0.
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=attrs&package-manager=pip&previous-version=25.3.0&new-version=25.4.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base-ft.txt | 2 +- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/base-ft.txt b/requirements/base-ft.txt index b5589a5ae93..eaca64b45b8 100644 --- a/requirements/base-ft.txt +++ b/requirements/base-ft.txt @@ -12,7 +12,7 @@ aiosignal==1.4.0 # via -r requirements/runtime-deps.in async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==25.3.0 +attrs==25.4.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/base.txt b/requirements/base.txt index d62506eff63..9adaa938305 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -12,7 +12,7 @@ aiosignal==1.4.0 # via -r requirements/runtime-deps.in async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==25.3.0 +attrs==25.4.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 1f34510fa70..8c8d041c880 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -22,7 +22,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via # -r requirements/runtime-deps.in # valkey -attrs==25.3.0 +attrs==25.4.0 # via -r requirements/runtime-deps.in babel==2.17.0 # via sphinx diff --git a/requirements/dev.txt b/requirements/dev.txt index 8cb5c8af18e..329340f4aae 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -22,7 +22,7 @@ async-timeout==5.0.1 ; python_version < "3.11" # via # -r requirements/runtime-deps.in # valkey -attrs==25.3.0 +attrs==25.4.0 # via -r requirements/runtime-deps.in babel==2.17.0 # via sphinx diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 9c5be187f73..6000bbb99d2 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -12,7 +12,7 @@ aiosignal==1.4.0 # via -r requirements/runtime-deps.in async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==25.3.0 +attrs==25.4.0 # via -r requirements/runtime-deps.in brotli==1.1.0 ; platform_python_implementation == "CPython" # via -r requirements/runtime-deps.in diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 85155a18b8f..38fc1851a79 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -14,7 +14,7 @@ annotated-types==0.7.0 # via pydantic async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==25.3.0 +attrs==25.4.0 # via -r requirements/runtime-deps.in blockbuster==1.5.25 # via -r requirements/test-common.in diff --git a/requirements/test.txt b/requirements/test.txt index f83d85b0f5b..6a3f340d339 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -14,7 +14,7 @@ annotated-types==0.7.0 # via pydantic async-timeout==5.0.1 ; python_version < "3.11" # via -r requirements/runtime-deps.in -attrs==25.3.0 +attrs==25.4.0 # via -r requirements/runtime-deps.in blockbuster==1.5.25 # via -r requirements/test-common.in From 482eaceb331d66e2c430752c584becc456971917 Mon Sep 17 00:00:00 2001 From: 
"dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 7 Oct 2025 01:10:19 +0000 Subject: [PATCH 05/51] Bump idna from 3.3 to 3.10 (#11591) Bumps [idna](https://github.com/kjd/idna) from 3.3 to 3.10.
Release notes

Sourced from idna's releases.

v3.10

No release notes provided.

v3.9

No release notes provided.

v3.8

What's Changed

  • Fix a regression where the IDNAError exception was not being produced for certain inputs.
  • Add support for Python 3.13; drop support for Python 3.5, as it is no longer testable.
  • Documentation improvements
  • Updates to package testing using GitHub Actions

Thanks to Hugo van Kemenade for contributions to this release.

Full Changelog: https://github.com/kjd/idna/compare/v3.7...v3.8

v3.7

What's Changed

  • Fix an issue where specially crafted inputs to encode() could take an exceptionally long amount of time to process. [CVE-2024-3651] (see the sketch below)

Thanks to Guido Vranken for reporting the issue.

Full Changelog: https://github.com/kjd/idna/compare/v3.6...v3.7
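A minimal sketch (not part of the quoted release notes) of the public idna API that CVE-2024-3651 concerns. The crafted inputs from the advisory are deliberately not reproduced; the overlong label below is only an assumed example of input that gets rejected quickly rather than processed slowly:

```python
# Hedged sketch of typical idna usage; assumes idna >= 3.7.
import idna

# Normal round-trip of an internationalized domain name.
print(idna.encode("ドメイン.テスト"))            # b'xn--eckwd4c7c.xn--zckzah'
print(idna.decode(b"xn--eckwd4c7c.xn--zckzah"))  # 'ドメイン.テスト'

# Invalid input raises idna.IDNAError instead of being processed;
# 3.7 also bounds the time spent on crafted inputs before rejection.
try:
    idna.encode("a" * 100_000)  # a single label longer than 63 octets
except idna.IDNAError as exc:
    print(f"rejected: {exc}")
```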

Changelog

Sourced from idna's changelog.

3.10 (2024-09-15)

  • Reverted to Unicode 15.1.0 data. Unicode 16 has some significant changes to UTS46 processing that will require more work to properly implement.

3.9 (2024-09-13)

  • Update to Unicode 16.0.0
  • Deprecate setup.cfg in favour of pyproject.toml
  • Use ruff for code formatting

Thanks to Waket Zheng for contributions to this release.

3.8 (2024-08-23)

  • Fix a regression where the IDNAError exception was not being produced for certain inputs.
  • Add support for Python 3.13; drop support for Python 3.5, as it is no longer testable.
  • Documentation improvements
  • Updates to package testing using GitHub Actions

Thanks to Hugo van Kemenade for contributions to this release.

3.7 (2024-04-11)

  • Fix an issue where specially crafted inputs to encode() could take an exceptionally long amount of time to process. [CVE-2024-3651]

Thanks to Guido Vranken for reporting the issue.

3.6 (2023-11-25)

  • Fix regression to include tests in source distribution.

3.5 (2023-11-24)

  • Update to Unicode 15.1.0
  • String codec name is now "idna2008", as overriding the system codec "idna" was not working (see the sketch below).
  • Fix typing error for codec encoding
  • "setup.cfg" has been added for this release due to some downstream lack of adherence to PEP 517. Should be removed in a future release so please prepare accordingly.

... (truncated)
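The "idna2008" codec rename from the 3.5 entry above can be exercised as follows; a hedged sketch assuming the idna.codec registration module shipped with the package:

```python
# Hedged sketch; importing idna.codec registers the codec as a side effect.
import idna.codec  # noqa: F401

# Lookup is by the name "idna2008"; the package stopped overriding the
# built-in "idna" codec because that approach was not working.
print("ドメイン.テスト".encode("idna2008"))            # b'xn--eckwd4c7c.xn--zckzah'
print(b"xn--eckwd4c7c.xn--zckzah".decode("idna2008"))  # 'ドメイン.テスト'
```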

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=idna&package-manager=pip&previous-version=3.3&new-version=3.10)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/base.txt b/requirements/base.txt index 9adaa938305..ef3d2bd5b2d 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -24,7 +24,7 @@ frozenlist==1.8.0 # aiosignal gunicorn==23.0.0 # via -r requirements/base.in -idna==3.4 +idna==3.10 # via yarl multidict==6.7.0 # via diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 8c8d041c880..b30ef30bac5 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -90,7 +90,7 @@ gunicorn==23.0.0 # via -r requirements/base.in identify==2.6.15 # via pre-commit -idna==3.3 +idna==3.10 # via # requests # trustme diff --git a/requirements/dev.txt b/requirements/dev.txt index 329340f4aae..27cb4543ca0 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -88,7 +88,7 @@ gunicorn==23.0.0 # via -r requirements/base.in identify==2.6.15 # via pre-commit -idna==3.4 +idna==3.10 # via # requests # trustme diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 55c618df961..bf5ab08f083 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -18,7 +18,7 @@ click==8.1.8 # via towncrier docutils==0.21.2 # via sphinx -idna==3.4 +idna==3.10 # via requests imagesize==1.4.1 # via sphinx diff --git a/requirements/lint.txt b/requirements/lint.txt index 8de4c8642d5..31d20fd8f81 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -35,7 +35,7 @@ freezegun==1.5.5 # via -r requirements/lint.in identify==2.6.15 # via pre-commit -idna==3.7 +idna==3.10 # via trustme iniconfig==2.1.0 # via pytest diff --git a/requirements/test.txt b/requirements/test.txt index 6a3f340d339..931f836c880 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -47,7 +47,7 @@ frozenlist==1.8.0 # aiosignal gunicorn==23.0.0 # via -r requirements/base.in -idna==3.4 +idna==3.10 # via # trustme # yarl From a035451934c16c772fdd63d09cf3b268796c155b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 7 Oct 2025 01:29:10 +0000 Subject: [PATCH 06/51] Bump yarl from 1.21.0 to 1.22.0 (#11596) Bumps [yarl](https://github.com/aio-libs/yarl) from 1.21.0 to 1.22.0.
Release notes

Sourced from yarl's releases.

1.22.0

Features

  • Added arm64 Windows wheel builds -- by :user:finnagin.

    Related issues and pull requests on GitHub: #1516.


1.20.1

Bug fixes

  • Started raising a :exc:ValueError exception for corrupted IPv6 URL values.

    This fixes an issue where an :exc:IndexError exception was leaking from the internal code instead of being handled and transformed into a user-facing error. The problem occurred under the following conditions: an empty IPv6 URL, or brackets in reverse order (see the sketch below).

    -- by :user:MaelPic.

    Related issues and pull requests on GitHub: #1512.
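A hedged sketch (assuming yarl >= 1.20.1) of the change described in this entry; the two malformed URLs mirror the conditions named above, an empty IPv6 host and brackets in reverse order:

```python
# Hedged sketch; corrupted IPv6 authorities now raise ValueError instead
# of leaking the internal IndexError.
from yarl import URL

for bad in ("http://[]/", "http://]::1[/"):
    try:
        URL(bad)
    except ValueError as exc:
        print(f"{bad!r} rejected: {exc}")

# Well-formed IPv6 URLs are unaffected.
print(URL("http://[::1]:8080/").host)  # '::1'
```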

Packaging updates and notes for downstreams

  • Updated to use Cython 3.1 universally across the build path -- by :user:lysnikolaou.

    Related issues and pull requests on GitHub: #1514.

  • Made Cython line tracing opt-in via the with-cython-tracing build config setting -- by :user:bdraco.

    Previously, line tracing was enabled by default in :file:pyproject.toml, which caused build issues for some users and made wheels nearly twice as slow. Now line tracing is only enabled when explicitly requested via pip install . --config-setting=with-cython-tracing=true or by setting the YARL_CYTHON_TRACING environment variable.

    Related issues and pull requests on GitHub: #1521.


Changelog

Sourced from yarl's changelog.

1.22.0

(2025-10-05)

Features

  • Added arm64 Windows wheel builds -- by :user:finnagin.

    Related issues and pull requests on GitHub: :issue:1516.


Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=yarl&package-manager=pip&previous-version=1.21.0&new-version=1.22.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/base-ft.txt | 2 +- requirements/base.txt | 2 +- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/runtime-deps.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 7 files changed, 7 insertions(+), 7 deletions(-) diff --git a/requirements/base-ft.txt b/requirements/base-ft.txt index eaca64b45b8..8f629b8a566 100644 --- a/requirements/base-ft.txt +++ b/requirements/base-ft.txt @@ -44,7 +44,7 @@ typing-extensions==4.15.0 # via # aiosignal # multidict -yarl==1.21.0 +yarl==1.22.0 # via -r requirements/runtime-deps.in zstandard==0.25.0 ; platform_python_implementation == "CPython" and python_version < "3.14" # via -r requirements/runtime-deps.in diff --git a/requirements/base.txt b/requirements/base.txt index ef3d2bd5b2d..4e37bd8498f 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -46,7 +46,7 @@ typing-extensions==4.15.0 # multidict uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" # via -r requirements/base.in -yarl==1.21.0 +yarl==1.22.0 # via -r requirements/runtime-deps.in zstandard==0.25.0 ; platform_python_implementation == "CPython" and python_version < "3.14" # via -r requirements/runtime-deps.in diff --git a/requirements/constraints.txt b/requirements/constraints.txt index b30ef30bac5..3b49dec2d1c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -296,7 +296,7 @@ wait-for-it==2.3.0 # via -r requirements/test-common.in wheel==0.45.1 # via pip-tools -yarl==1.21.0 +yarl==1.22.0 # via -r requirements/runtime-deps.in zlib-ng==1.0.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 27cb4543ca0..397ba176293 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -287,7 +287,7 @@ wait-for-it==2.3.0 # via -r requirements/test-common.in wheel==0.45.1 # via pip-tools -yarl==1.21.0 +yarl==1.22.0 # via -r requirements/runtime-deps.in zlib-ng==1.0.0 # via diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt index 6000bbb99d2..c4220c48940 100644 --- a/requirements/runtime-deps.txt +++ b/requirements/runtime-deps.txt @@ -40,7 +40,7 @@ typing-extensions==4.15.0 # via # aiosignal # multidict -yarl==1.21.0 +yarl==1.22.0 # via -r requirements/runtime-deps.in zstandard==0.25.0 ; platform_python_implementation == "CPython" and python_version < "3.14" # via -r requirements/runtime-deps.in diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 38fc1851a79..59cb7ba6341 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -148,7 +148,7 @@ typing-inspection==0.4.2 # via pydantic wait-for-it==2.3.0 # via -r requirements/test-common.in -yarl==1.21.0 +yarl==1.22.0 # via -r requirements/runtime-deps.in zlib-ng==1.0.0 # via -r requirements/test-common.in diff --git a/requirements/test.txt b/requirements/test.txt index 931f836c880..c1cccbba540 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -150,7 +150,7 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho # via -r requirements/base.in wait-for-it==2.3.0 # via -r requirements/test-common.in -yarl==1.21.0 +yarl==1.22.0 # via -r requirements/runtime-deps.in zlib-ng==1.0.0 # via -r requirements/test-common.in From 80eb0aa90985f956ca2dbf2789bdea73d560dcec Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Tue, 7 Oct 2025 18:40:57 +0100 Subject: [PATCH 07/51] [3.14] Drop Python 3.9 (#11601) --- 
.github/workflows/ci-cd.yml | 4 +- CHANGES.rst | 2 +- CHANGES/11601.breaking.rst | 1 + Makefile | 10 - aiohttp/__init__.py | 6 +- aiohttp/_cookie_helpers.py | 13 +- aiohttp/_websocket/helpers.py | 7 +- aiohttp/_websocket/models.py | 5 +- aiohttp/_websocket/reader_py.py | 32 +-- aiohttp/_websocket/writer.py | 6 +- aiohttp/abc.py | 42 +-- aiohttp/base_protocol.py | 8 +- aiohttp/client.py | 286 +++++++++----------- aiohttp/client_exceptions.py | 34 +-- aiohttp/client_middleware_digest_auth.py | 31 +-- aiohttp/client_proto.py | 30 +- aiohttp/client_reqrep.py | 147 +++++----- aiohttp/client_ws.py | 46 ++-- aiohttp/compression_utils.py | 30 +- aiohttp/connector.py | 197 ++++++-------- aiohttp/cookiejar.py | 43 ++- aiohttp/formdata.py | 15 +- aiohttp/hdrs.py | 4 +- aiohttp/helpers.py | 103 ++++--- aiohttp/http.py | 8 +- aiohttp/http_exceptions.py | 13 +- aiohttp/http_parser.py | 82 +++--- aiohttp/http_writer.py | 16 +- aiohttp/multipart.py | 98 +++---- aiohttp/payload.py | 110 ++++---- aiohttp/payload_streamer.py | 7 +- aiohttp/pytest_plugin.py | 35 +-- aiohttp/resolver.py | 26 +- aiohttp/streams.py | 53 ++-- aiohttp/test_utils.py | 75 +++-- aiohttp/tracing.py | 7 +- aiohttp/typedefs.py | 19 +- aiohttp/web.py | 68 ++--- aiohttp/web_app.py | 85 +++--- aiohttp/web_exceptions.py | 42 +-- aiohttp/web_fileresponse.py | 32 +-- aiohttp/web_log.py | 13 +- aiohttp/web_middlewares.py | 6 +- aiohttp/web_protocol.py | 74 +++-- aiohttp/web_request.py | 105 +++---- aiohttp/web_response.py | 128 ++++----- aiohttp/web_routedef.py | 35 +-- aiohttp/web_runner.py | 35 ++- aiohttp/web_server.py | 12 +- aiohttp/web_urldispatcher.py | 170 +++++------- aiohttp/web_ws.py | 71 +++-- aiohttp/worker.py | 10 +- docs/conf.py | 1 - docs/contributing.rst | 13 - examples/background_tasks.py | 6 +- examples/combined_middleware.py | 8 +- examples/fake_server.py | 3 +- examples/logging_middleware.py | 5 +- examples/retry_middleware.py | 12 +- examples/token_refresh_middleware.py | 17 +- examples/web_ws.py | 5 +- pyproject.toml | 2 +- setup.cfg | 6 +- setup.py | 4 +- tests/autobahn/Dockerfile.aiohttp | 2 +- tests/autobahn/server/server.py | 5 +- tests/autobahn/test_autobahn.py | 5 +- tests/conftest.py | 3 +- tests/test_benchmarks_client_request.py | 3 +- tests/test_benchmarks_http_websocket.py | 3 +- tests/test_benchmarks_web_urldispatcher.py | 28 +- tests/test_circular_imports.py | 9 +- tests/test_client_functional.py | 20 +- tests/test_client_middleware.py | 28 +- tests/test_client_middleware_digest_auth.py | 5 +- tests/test_client_request.py | 14 +- tests/test_client_response.py | 2 +- tests/test_client_session.py | 11 +- tests/test_client_ws.py | 4 +- tests/test_client_ws_functional.py | 6 +- tests/test_connector.py | 47 ++-- tests/test_cookiejar.py | 7 +- tests/test_helpers.py | 4 +- tests/test_http_parser.py | 8 +- tests/test_http_writer.py | 4 +- tests/test_multipart.py | 2 - tests/test_payload.py | 18 +- tests/test_proxy_functional.py | 12 +- tests/test_resolver.py | 16 +- tests/test_run_app.py | 19 +- tests/test_test_utils.py | 3 +- tests/test_web_app.py | 40 ++- tests/test_web_exceptions.py | 3 + tests/test_web_functional.py | 11 +- tests/test_web_response.py | 4 +- tests/test_web_sendfile_functional.py | 17 +- tests/test_web_urldispatcher.py | 7 +- tests/test_web_websocket_functional.py | 8 +- tests/test_websocket_parser.py | 9 +- tests/test_websocket_writer.py | 3 +- tests/test_worker.py | 6 +- 101 files changed, 1345 insertions(+), 1670 deletions(-) create mode 100644 CHANGES/11601.breaking.rst diff --git 
a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 05653d9c028..1e1eb60a77c 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -122,7 +122,7 @@ jobs: needs: gen_llhttp strategy: matrix: - pyver: [3.9, '3.10', '3.11', '3.12', '3.13', '3.14'] + pyver: ['3.10', '3.11', '3.12', '3.13', '3.14'] no-extensions: ['', 'Y'] os: [ubuntu, macos, windows] experimental: [false] @@ -132,7 +132,7 @@ jobs: - os: windows no-extensions: 'Y' include: - - pyver: pypy-3.9 + - pyver: pypy-3.10 no-extensions: 'Y' os: ubuntu experimental: false diff --git a/CHANGES.rst b/CHANGES.rst index 5fdccd5ca45..0bab04c4072 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -4546,7 +4546,7 @@ Bugfixes `#5853 `_ - Added ``params`` keyword argument to ``ClientSession.ws_connect``. -- :user:`hoh`. `#5868 `_ -- Uses :py:class:`~asyncio.ThreadedChildWatcher` under POSIX to allow setting up test loop in non-main thread. +- Uses ``asyncio.ThreadedChildWatcher`` under POSIX to allow setting up test loop in non-main thread. `#5877 `_ - Fix the error in handling the return value of `getaddrinfo`. `getaddrinfo` will return an `(int, bytes)` tuple, if CPython could not handle the address family. diff --git a/CHANGES/11601.breaking.rst b/CHANGES/11601.breaking.rst new file mode 100644 index 00000000000..c2eccbd9e1c --- /dev/null +++ b/CHANGES/11601.breaking.rst @@ -0,0 +1 @@ +Dropped support for Python 3.9 -- by :user:`Dreamsorcerer`. diff --git a/Makefile b/Makefile index cf621705e2e..af5f7146716 100644 --- a/Makefile +++ b/Makefile @@ -123,16 +123,6 @@ define run_tests_in_docker docker run --rm -ti -v `pwd`:/src -w /src "aiohttp-test-$(1)-$(2)" $(TEST_SPEC) endef -.PHONY: test-3.9-no-extensions test -test-3.9-no-extensions: - $(call run_tests_in_docker,3.9,y) -test-3.9: - $(call run_tests_in_docker,3.9,n) -test-3.10-no-extensions: - $(call run_tests_in_docker,3.10,y) -test-3.10: - $(call run_tests_in_docker,3.10,n) - .PHONY: clean clean: @rm -rf `find . -name __pycache__` diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 443b05ff275..0d5c5c4cba6 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,6 +1,6 @@ __version__ = "3.13.1.dev0" -from typing import TYPE_CHECKING, Tuple +from typing import TYPE_CHECKING from . import hdrs as hdrs from .client import ( @@ -130,7 +130,7 @@ GunicornWebWorker as GunicornWebWorker, ) -__all__: Tuple[str, ...] = ( +__all__: tuple[str, ...] = ( "hdrs", # client "AddrInfoType", @@ -257,7 +257,7 @@ ) -def __dir__() -> Tuple[str, ...]: +def __dir__() -> tuple[str, ...]: return __all__ + ("__doc__",) diff --git a/aiohttp/_cookie_helpers.py b/aiohttp/_cookie_helpers.py index 9e80b6065d7..7fe8f43d12b 100644 --- a/aiohttp/_cookie_helpers.py +++ b/aiohttp/_cookie_helpers.py @@ -6,8 +6,9 @@ """ import re +from collections.abc import Sequence from http.cookies import Morsel -from typing import List, Optional, Sequence, Tuple, cast +from typing import cast from .log import internal_logger @@ -156,7 +157,7 @@ def _unquote(value: str) -> str: return _unquote_sub(_unquote_replace, value) -def parse_cookie_header(header: str) -> List[Tuple[str, Morsel[str]]]: +def parse_cookie_header(header: str) -> list[tuple[str, Morsel[str]]]: """ Parse a Cookie header according to RFC 6265 Section 5.4. 
@@ -176,7 +177,7 @@ def parse_cookie_header(header: str) -> List[Tuple[str, Morsel[str]]]: if not header: return [] - cookies: List[Tuple[str, Morsel[str]]] = [] + cookies: list[tuple[str, Morsel[str]]] = [] i = 0 n = len(header) @@ -211,7 +212,7 @@ def parse_cookie_header(header: str) -> List[Tuple[str, Morsel[str]]]: return cookies -def parse_set_cookie_headers(headers: Sequence[str]) -> List[Tuple[str, Morsel[str]]]: +def parse_set_cookie_headers(headers: Sequence[str]) -> list[tuple[str, Morsel[str]]]: """ Parse cookie headers using a vendored version of SimpleCookie parsing. @@ -230,7 +231,7 @@ def parse_set_cookie_headers(headers: Sequence[str]) -> List[Tuple[str, Morsel[s This implementation handles unmatched quotes more gracefully to prevent cookie loss. See https://github.com/aio-libs/aiohttp/issues/7993 """ - parsed_cookies: List[Tuple[str, Morsel[str]]] = [] + parsed_cookies: list[tuple[str, Morsel[str]]] = [] for header in headers: if not header: @@ -239,7 +240,7 @@ def parse_set_cookie_headers(headers: Sequence[str]) -> List[Tuple[str, Morsel[s # Parse cookie string using SimpleCookie's algorithm i = 0 n = len(header) - current_morsel: Optional[Morsel[str]] = None + current_morsel: Morsel[str] | None = None morsel_seen = False while 0 <= i < n: diff --git a/aiohttp/_websocket/helpers.py b/aiohttp/_websocket/helpers.py index 0bb58df9228..6a42a0054f3 100644 --- a/aiohttp/_websocket/helpers.py +++ b/aiohttp/_websocket/helpers.py @@ -2,8 +2,9 @@ import functools import re +from re import Pattern from struct import Struct -from typing import TYPE_CHECKING, Final, List, Optional, Pattern, Tuple +from typing import TYPE_CHECKING, Final from ..helpers import NO_EXTENSIONS from .models import WSHandshakeError @@ -23,7 +24,7 @@ # Used by _websocket_mask_python @functools.lru_cache -def _xor_table() -> List[bytes]: +def _xor_table() -> list[bytes]: return [bytes(a ^ b for a in range(256)) for b in range(256)] @@ -74,7 +75,7 @@ def _websocket_mask_python(mask: bytes, data: bytearray) -> None: _WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?") -def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]: +def ws_ext_parse(extstr: str | None, isserver: bool = False) -> tuple[int, bool]: if not extstr: return 0, False diff --git a/aiohttp/_websocket/models.py b/aiohttp/_websocket/models.py index 7e89b965295..4c29ea9a984 100644 --- a/aiohttp/_websocket/models.py +++ b/aiohttp/_websocket/models.py @@ -1,8 +1,9 @@ """Models for WebSocket protocol versions 13 and 8.""" import json +from collections.abc import Callable from enum import IntEnum -from typing import Any, Callable, Final, NamedTuple, Optional, cast +from typing import Any, Final, NamedTuple, cast WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF]) @@ -51,7 +52,7 @@ class WSMessage(NamedTuple): type: WSMsgType # To type correctly, this would need some kind of tagged union for each type. data: Any - extra: Optional[str] + extra: str | None def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any: """Return parsed JSON data. 
diff --git a/aiohttp/_websocket/reader_py.py b/aiohttp/_websocket/reader_py.py index f966a1593c5..38682fcdf75 100644 --- a/aiohttp/_websocket/reader_py.py +++ b/aiohttp/_websocket/reader_py.py @@ -3,7 +3,7 @@ import asyncio import builtins from collections import deque -from typing import Deque, Final, Optional, Set, Tuple, Union +from typing import Final from ..base_protocol import BaseProtocol from ..compression_utils import ZLibDecompressor @@ -18,7 +18,7 @@ WSMsgType, ) -ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode} +ALLOWED_CLOSE_CODES: Final[set[int]] = {int(i) for i in WSCloseCode} # States for the reader, used to parse the WebSocket frame # integer values are used so they can be cythonized @@ -65,16 +65,16 @@ def __init__( self._limit = limit * 2 self._loop = loop self._eof = False - self._waiter: Optional[asyncio.Future[None]] = None - self._exception: Union[BaseException, None] = None - self._buffer: Deque[Tuple[WSMessage, int]] = deque() + self._waiter: asyncio.Future[None] | None = None + self._exception: BaseException | None = None + self._buffer: deque[tuple[WSMessage, int]] = deque() self._get_buffer = self._buffer.popleft self._put_buffer = self._buffer.append def is_eof(self) -> bool: return self._eof - def exception(self) -> Optional[BaseException]: + def exception(self) -> BaseException | None: return self._exception def set_exception( @@ -137,7 +137,7 @@ def __init__( self.queue = queue self._max_msg_size = max_msg_size - self._exc: Optional[Exception] = None + self._exc: Exception | None = None self._partial = bytearray() self._state = READ_HEADER @@ -149,11 +149,11 @@ def __init__( self._tail: bytes = b"" self._has_mask = False - self._frame_mask: Optional[bytes] = None + self._frame_mask: bytes | None = None self._payload_bytes_to_read = 0 self._payload_len_flag = 0 self._compressed: int = COMPRESSED_NOT_SET - self._decompressobj: Optional[ZLibDecompressor] = None + self._decompressobj: ZLibDecompressor | None = None self._compress = compress def feed_eof(self) -> None: @@ -162,9 +162,7 @@ def feed_eof(self) -> None: # data can be bytearray on Windows because proactor event loop uses bytearray # and asyncio types this to Union[bytes, bytearray, memoryview] so we need # coerce data to bytes if it is not - def feed_data( - self, data: Union[bytes, bytearray, memoryview] - ) -> Tuple[bool, bytes]: + def feed_data(self, data: bytes | bytearray | memoryview) -> tuple[bool, bytes]: if type(data) is not bytes: data = bytes(data) @@ -183,9 +181,9 @@ def feed_data( def _handle_frame( self, fin: bool, - opcode: Union[int, cython_int], # Union intended: Cython pxd uses C int - payload: Union[bytes, bytearray], - compressed: Union[int, cython_int], # Union intended: Cython pxd uses C int + opcode: int | cython_int, # Union intended: Cython pxd uses C int + payload: bytes | bytearray, + compressed: int | cython_int, # Union intended: Cython pxd uses C int ) -> None: msg: WSMessage if opcode in {OP_CODE_TEXT, OP_CODE_BINARY, OP_CODE_CONTINUATION}: @@ -221,7 +219,7 @@ def _handle_frame( f"to be zero, got {opcode!r}", ) - assembled_payload: Union[bytes, bytearray] + assembled_payload: bytes | bytearray if has_partial: assembled_payload = self._partial + payload self._partial.clear() @@ -441,7 +439,7 @@ def _feed_data(self, data: bytes) -> None: self._payload_fragments.append(data_cstr[f_start_pos:f_end_pos]) break - payload: Union[bytes, bytearray] + payload: bytes | bytearray if had_fragments: # We have to join the payload fragments get the payload 
self._payload_fragments.append(data_cstr[f_start_pos:f_end_pos]) diff --git a/aiohttp/_websocket/writer.py b/aiohttp/_websocket/writer.py index 19163f9afdf..6307dba3670 100644 --- a/aiohttp/_websocket/writer.py +++ b/aiohttp/_websocket/writer.py @@ -3,7 +3,7 @@ import asyncio import random from functools import partial -from typing import Any, Final, Optional, Union +from typing import Any, Final from ..base_protocol import BaseProtocol from ..client_exceptions import ClientConnectionResetError @@ -65,7 +65,7 @@ def __init__( self._compressobj: Any = None # actually compressobj async def send_frame( - self, message: bytes, opcode: int, compress: Optional[int] = None + self, message: bytes, opcode: int, compress: int | None = None ) -> None: """Send a frame over the websocket with message as its payload.""" if self._closing and not (opcode & WSMsgType.CLOSE): @@ -166,7 +166,7 @@ def _make_compress_obj(self, compress: int) -> ZLibCompressor: max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE, ) - async def close(self, code: int = 1000, message: Union[bytes, str] = b"") -> None: + async def close(self, code: int = 1000, message: bytes | str = b"") -> None: """Close the websocket, sending the specified code and message.""" if isinstance(message, str): message = message.encode("utf-8") diff --git a/aiohttp/abc.py b/aiohttp/abc.py index faf09575afb..00e9280c3b1 100644 --- a/aiohttp/abc.py +++ b/aiohttp/abc.py @@ -2,23 +2,9 @@ import logging import socket from abc import ABC, abstractmethod -from collections.abc import Sized +from collections.abc import Awaitable, Callable, Generator, Iterable, Sequence, Sized from http.cookies import BaseCookie, Morsel -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - Dict, - Generator, - Iterable, - List, - Optional, - Sequence, - Tuple, - TypedDict, - Union, -) +from typing import TYPE_CHECKING, Any, TypedDict from multidict import CIMultiDict from yarl import URL @@ -32,8 +18,8 @@ from .web_request import BaseRequest, Request from .web_response import StreamResponse else: - BaseRequest = Request = Application = StreamResponse = None - HTTPException = None + BaseRequest = Request = Application = StreamResponse = Any + HTTPException = Any class AbstractRouter(ABC): @@ -74,21 +60,21 @@ def handler(self) -> Callable[[Request], Awaitable[StreamResponse]]: @abstractmethod def expect_handler( self, - ) -> Callable[[Request], Awaitable[Optional[StreamResponse]]]: + ) -> Callable[[Request], Awaitable[StreamResponse | None]]: """Expect handler for 100-continue processing""" @property # pragma: no branch @abstractmethod - def http_exception(self) -> Optional[HTTPException]: + def http_exception(self) -> HTTPException | None: """HTTPException instance raised on router's resolving, or None""" @abstractmethod # pragma: no branch - def get_info(self) -> Dict[str, Any]: + def get_info(self) -> dict[str, Any]: """Return a dict with additional info useful for introspection""" @property # pragma: no branch @abstractmethod - def apps(self) -> Tuple[Application, ...]: + def apps(self) -> tuple[Application, ...]: """Stack of nested applications. Top level application is left-most element. 
@@ -154,7 +140,7 @@ class AbstractResolver(ABC): @abstractmethod async def resolve( self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET - ) -> List[ResolveResult]: + ) -> list[ResolveResult]: """Return IP address for given hostname""" @abstractmethod @@ -174,7 +160,7 @@ async def close(self) -> None: class AbstractCookieJar(Sized, IterableBase): """Abstract Cookie Jar.""" - def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: + def __init__(self, *, loop: asyncio.AbstractEventLoop | None = None) -> None: self._loop = loop or asyncio.get_running_loop() @property @@ -183,7 +169,7 @@ def quote_cookie(self) -> bool: """Return True if cookies should be quoted.""" @abstractmethod - def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: + def clear(self, predicate: ClearCookiePredicate | None = None) -> None: """Clear all cookies if no predicate is passed.""" @abstractmethod @@ -211,10 +197,10 @@ class AbstractStreamWriter(ABC): buffer_size: int = 0 output_size: int = 0 - length: Optional[int] = 0 + length: int | None = 0 @abstractmethod - async def write(self, chunk: Union[bytes, bytearray, memoryview]) -> None: + async def write(self, chunk: bytes | bytearray | memoryview) -> None: """Write chunk into stream.""" @abstractmethod @@ -227,7 +213,7 @@ async def drain(self) -> None: @abstractmethod def enable_compression( - self, encoding: str = "deflate", strategy: Optional[int] = None + self, encoding: str = "deflate", strategy: int | None = None ) -> None: """Enable HTTP body compression""" diff --git a/aiohttp/base_protocol.py b/aiohttp/base_protocol.py index b0a67ed6ff6..7f01830f4e9 100644 --- a/aiohttp/base_protocol.py +++ b/aiohttp/base_protocol.py @@ -1,5 +1,5 @@ import asyncio -from typing import Optional, cast +from typing import cast from .client_exceptions import ClientConnectionResetError from .helpers import set_exception @@ -19,10 +19,10 @@ class BaseProtocol(asyncio.Protocol): def __init__(self, loop: asyncio.AbstractEventLoop) -> None: self._loop: asyncio.AbstractEventLoop = loop self._paused = False - self._drain_waiter: Optional[asyncio.Future[None]] = None + self._drain_waiter: asyncio.Future[None] | None = None self._reading_paused = False - self.transport: Optional[asyncio.Transport] = None + self.transport: asyncio.Transport | None = None @property def connected(self) -> bool: @@ -68,7 +68,7 @@ def connection_made(self, transport: asyncio.BaseTransport) -> None: tcp_nodelay(tr, True) self.transport = tr - def connection_lost(self, exc: Optional[BaseException]) -> None: + def connection_lost(self, exc: BaseException | None) -> None: # Wake up the writer if currently paused. 
self.transport = None if not self._paused: diff --git a/aiohttp/client.py b/aiohttp/client.py index 0c72d5948ce..f627aa5d927 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -8,30 +8,18 @@ import sys import traceback import warnings -from contextlib import suppress -from types import TracebackType -from typing import ( - TYPE_CHECKING, - Any, +from collections.abc import ( Awaitable, Callable, Coroutine, - Final, - FrozenSet, Generator, - Generic, Iterable, - List, Mapping, - Optional, Sequence, - Set, - Tuple, - Type, - TypedDict, - TypeVar, - Union, ) +from contextlib import suppress +from types import TracebackType +from typing import TYPE_CHECKING, Any, Final, Generic, TypedDict, TypeVar import attr from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr @@ -161,7 +149,7 @@ if TYPE_CHECKING: from ssl import SSLContext else: - SSLContext = None + SSLContext = Any if sys.version_info >= (3, 11) and TYPE_CHECKING: from typing import Unpack @@ -171,37 +159,37 @@ class _RequestOptions(TypedDict, total=False): params: Query data: Any json: Any - cookies: Union[LooseCookies, None] - headers: Union[LooseHeaders, None] - skip_auto_headers: Union[Iterable[str], None] - auth: Union[BasicAuth, None] + cookies: LooseCookies | None + headers: LooseHeaders | None + skip_auto_headers: Iterable[str] | None + auth: BasicAuth | None allow_redirects: bool max_redirects: int - compress: Union[str, bool, None] - chunked: Union[bool, None] + compress: str | bool | None + chunked: bool | None expect100: bool - raise_for_status: Union[None, bool, Callable[[ClientResponse], Awaitable[None]]] + raise_for_status: None | bool | Callable[[ClientResponse], Awaitable[None]] read_until_eof: bool - proxy: Union[StrOrURL, None] - proxy_auth: Union[BasicAuth, None] - timeout: "Union[ClientTimeout, _SENTINEL, None]" - ssl: Union[SSLContext, bool, Fingerprint] - server_hostname: Union[str, None] - proxy_headers: Union[LooseHeaders, None] - trace_request_ctx: Union[Mapping[str, Any], None] - read_bufsize: Union[int, None] - auto_decompress: Union[bool, None] - max_line_size: Union[int, None] - max_field_size: Union[int, None] - middlewares: Optional[Sequence[ClientMiddlewareType]] + proxy: StrOrURL | None + proxy_auth: BasicAuth | None + timeout: "ClientTimeout | _SENTINEL | None" + ssl: SSLContext | bool | Fingerprint + server_hostname: str | None + proxy_headers: LooseHeaders | None + trace_request_ctx: Mapping[str, Any] | None + read_bufsize: int | None + auto_decompress: bool | None + max_line_size: int | None + max_field_size: int | None + middlewares: Sequence[ClientMiddlewareType] | None @attr.s(auto_attribs=True, frozen=True, slots=True) class ClientTimeout: - total: Optional[float] = None - connect: Optional[float] = None - sock_read: Optional[float] = None - sock_connect: Optional[float] = None + total: float | None = None + connect: float | None = None + sock_read: float | None = None + sock_connect: float | None = None ceil_threshold: float = 5 # pool_queue_timeout: Optional[float] = None @@ -266,48 +254,46 @@ class ClientSession: ] ) - _source_traceback: Optional[traceback.StackSummary] = None - _connector: Optional[BaseConnector] = None + _source_traceback: traceback.StackSummary | None = None + _connector: BaseConnector | None = None def __init__( self, - base_url: Optional[StrOrURL] = None, + base_url: StrOrURL | None = None, *, - connector: Optional[BaseConnector] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, - cookies: Optional[LooseCookies] = None, - headers: 
Optional[LooseHeaders] = None, - proxy: Optional[StrOrURL] = None, - proxy_auth: Optional[BasicAuth] = None, - skip_auto_headers: Optional[Iterable[str]] = None, - auth: Optional[BasicAuth] = None, + connector: BaseConnector | None = None, + loop: asyncio.AbstractEventLoop | None = None, + cookies: LooseCookies | None = None, + headers: LooseHeaders | None = None, + proxy: StrOrURL | None = None, + proxy_auth: BasicAuth | None = None, + skip_auto_headers: Iterable[str] | None = None, + auth: BasicAuth | None = None, json_serialize: JSONEncoder = json.dumps, - request_class: Type[ClientRequest] = ClientRequest, - response_class: Type[ClientResponse] = ClientResponse, - ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse, + request_class: type[ClientRequest] = ClientRequest, + response_class: type[ClientResponse] = ClientResponse, + ws_response_class: type[ClientWebSocketResponse] = ClientWebSocketResponse, version: HttpVersion = http.HttpVersion11, - cookie_jar: Optional[AbstractCookieJar] = None, + cookie_jar: AbstractCookieJar | None = None, connector_owner: bool = True, - raise_for_status: Union[ - bool, Callable[[ClientResponse], Awaitable[None]] - ] = False, - read_timeout: Union[float, _SENTINEL] = sentinel, - conn_timeout: Optional[float] = None, - timeout: Union[object, ClientTimeout] = sentinel, + raise_for_status: bool | Callable[[ClientResponse], Awaitable[None]] = False, + read_timeout: float | _SENTINEL = sentinel, + conn_timeout: float | None = None, + timeout: object | ClientTimeout = sentinel, auto_decompress: bool = True, trust_env: bool = False, requote_redirect_url: bool = True, - trace_configs: Optional[List[TraceConfig]] = None, + trace_configs: list[TraceConfig] | None = None, read_bufsize: int = 2**16, max_line_size: int = 8190, max_field_size: int = 8190, fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8", middlewares: Sequence[ClientMiddlewareType] = (), - ssl_shutdown_timeout: Union[_SENTINEL, None, float] = sentinel, + ssl_shutdown_timeout: _SENTINEL | None | float = sentinel, ) -> None: # We initialise _connector to None immediately, as it's referenced in __del__() # and could cause issues if an exception occurs during initialisation. 
- self._connector: Optional[BaseConnector] = None + self._connector: BaseConnector | None = None if loop is None: if connector is not None: @@ -316,7 +302,7 @@ def __init__( loop = loop or asyncio.get_running_loop() if base_url is None or isinstance(base_url, URL): - self._base_url: Optional[URL] = base_url + self._base_url: URL | None = base_url self._base_url_origin = None if base_url is None else base_url.origin() else: self._base_url = URL(base_url) @@ -427,10 +413,9 @@ def __init__( self._retry_connection: bool = True self._middlewares = middlewares - def __init_subclass__(cls: Type["ClientSession"]) -> None: + def __init_subclass__(cls: type["ClientSession"]) -> None: warnings.warn( - "Inheritance class {} from ClientSession " - "is discouraged".format(cls.__name__), + f"Inheritance class {cls.__name__} from ClientSession " "is discouraged", DeprecationWarning, stacklevel=2, ) @@ -440,8 +425,7 @@ def __init_subclass__(cls: Type["ClientSession"]) -> None: def __setattr__(self, name: str, val: Any) -> None: if name not in self.ATTRS: warnings.warn( - "Setting custom ClientSession.{} attribute " - "is discouraged".format(name), + f"Setting custom ClientSession.{name} attribute " "is discouraged", DeprecationWarning, stacklevel=2, ) @@ -489,34 +473,34 @@ async def _request( params: Query = None, data: Any = None, json: Any = None, - cookies: Optional[LooseCookies] = None, - headers: Optional[LooseHeaders] = None, - skip_auto_headers: Optional[Iterable[str]] = None, - auth: Optional[BasicAuth] = None, + cookies: LooseCookies | None = None, + headers: LooseHeaders | None = None, + skip_auto_headers: Iterable[str] | None = None, + auth: BasicAuth | None = None, allow_redirects: bool = True, max_redirects: int = 10, - compress: Union[str, bool, None] = None, - chunked: Optional[bool] = None, + compress: str | bool | None = None, + chunked: bool | None = None, expect100: bool = False, - raise_for_status: Union[ - None, bool, Callable[[ClientResponse], Awaitable[None]] - ] = None, + raise_for_status: ( + None | bool | Callable[[ClientResponse], Awaitable[None]] + ) = None, read_until_eof: bool = True, - proxy: Optional[StrOrURL] = None, - proxy_auth: Optional[BasicAuth] = None, - timeout: Union[ClientTimeout, _SENTINEL] = sentinel, - verify_ssl: Optional[bool] = None, - fingerprint: Optional[bytes] = None, - ssl_context: Optional[SSLContext] = None, - ssl: Union[SSLContext, bool, Fingerprint] = True, - server_hostname: Optional[str] = None, - proxy_headers: Optional[LooseHeaders] = None, - trace_request_ctx: Optional[Mapping[str, Any]] = None, - read_bufsize: Optional[int] = None, - auto_decompress: Optional[bool] = None, - max_line_size: Optional[int] = None, - max_field_size: Optional[int] = None, - middlewares: Optional[Sequence[ClientMiddlewareType]] = None, + proxy: StrOrURL | None = None, + proxy_auth: BasicAuth | None = None, + timeout: ClientTimeout | _SENTINEL = sentinel, + verify_ssl: bool | None = None, + fingerprint: bytes | None = None, + ssl_context: SSLContext | None = None, + ssl: SSLContext | bool | Fingerprint = True, + server_hostname: str | None = None, + proxy_headers: LooseHeaders | None = None, + trace_request_ctx: Mapping[str, Any] | None = None, + read_bufsize: int | None = None, + auto_decompress: bool | None = None, + max_line_size: int | None = None, + max_field_size: int | None = None, + middlewares: Sequence[ClientMiddlewareType] | None = None, ) -> ClientResponse: # NOTE: timeout clamps existing connect and read timeouts. 
We cannot @@ -539,7 +523,7 @@ async def _request( warnings.warn("Chunk size is deprecated #1615", DeprecationWarning) redirects = 0 - history: List[ClientResponse] = [] + history: list[ClientResponse] = [] version = self._version params = params or {} @@ -555,7 +539,7 @@ async def _request( if url.scheme not in self._connector.allowed_protocol_schema_set: raise NonHttpUrlClientError(url) - skip_headers: Optional[Iterable[istr]] + skip_headers: Iterable[istr] | None if skip_auto_headers is not None: skip_headers = { istr(i) for i in skip_auto_headers @@ -681,7 +665,7 @@ async def _request( if req_cookies: all_cookies.load(req_cookies) - proxy_: Optional[URL] = None + proxy_: URL | None = None if proxy is not None: proxy_ = URL(proxy) elif self._trust_env: @@ -930,23 +914,23 @@ def ws_connect( *, method: str = hdrs.METH_GET, protocols: Iterable[str] = (), - timeout: Union[ClientWSTimeout, _SENTINEL] = sentinel, - receive_timeout: Optional[float] = None, + timeout: ClientWSTimeout | _SENTINEL = sentinel, + receive_timeout: float | None = None, autoclose: bool = True, autoping: bool = True, - heartbeat: Optional[float] = None, - auth: Optional[BasicAuth] = None, - origin: Optional[str] = None, + heartbeat: float | None = None, + auth: BasicAuth | None = None, + origin: str | None = None, params: Query = None, - headers: Optional[LooseHeaders] = None, - proxy: Optional[StrOrURL] = None, - proxy_auth: Optional[BasicAuth] = None, - ssl: Union[SSLContext, bool, Fingerprint] = True, - verify_ssl: Optional[bool] = None, - fingerprint: Optional[bytes] = None, - ssl_context: Optional[SSLContext] = None, - server_hostname: Optional[str] = None, - proxy_headers: Optional[LooseHeaders] = None, + headers: LooseHeaders | None = None, + proxy: StrOrURL | None = None, + proxy_auth: BasicAuth | None = None, + ssl: SSLContext | bool | Fingerprint = True, + verify_ssl: bool | None = None, + fingerprint: bytes | None = None, + ssl_context: SSLContext | None = None, + server_hostname: str | None = None, + proxy_headers: LooseHeaders | None = None, compress: int = 0, max_msg_size: int = 4 * 1024 * 1024, ) -> "_WSRequestContextManager": @@ -984,23 +968,23 @@ async def _ws_connect( *, method: str = hdrs.METH_GET, protocols: Iterable[str] = (), - timeout: Union[ClientWSTimeout, _SENTINEL] = sentinel, - receive_timeout: Optional[float] = None, + timeout: ClientWSTimeout | _SENTINEL = sentinel, + receive_timeout: float | None = None, autoclose: bool = True, autoping: bool = True, - heartbeat: Optional[float] = None, - auth: Optional[BasicAuth] = None, - origin: Optional[str] = None, + heartbeat: float | None = None, + auth: BasicAuth | None = None, + origin: str | None = None, params: Query = None, - headers: Optional[LooseHeaders] = None, - proxy: Optional[StrOrURL] = None, - proxy_auth: Optional[BasicAuth] = None, - ssl: Union[SSLContext, bool, Fingerprint] = True, - verify_ssl: Optional[bool] = None, - fingerprint: Optional[bytes] = None, - ssl_context: Optional[SSLContext] = None, - server_hostname: Optional[str] = None, - proxy_headers: Optional[LooseHeaders] = None, + headers: LooseHeaders | None = None, + proxy: StrOrURL | None = None, + proxy_auth: BasicAuth | None = None, + ssl: SSLContext | bool | Fingerprint = True, + verify_ssl: bool | None = None, + fingerprint: bytes | None = None, + ssl_context: SSLContext | None = None, + server_hostname: str | None = None, + proxy_headers: LooseHeaders | None = None, compress: int = 0, max_msg_size: int = 4 * 1024 * 1024, ) -> ClientWebSocketResponse: @@ -1195,14 
+1179,14 @@ async def _ws_connect( client_notakeover=notakeover, ) - def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]": + def _prepare_headers(self, headers: LooseHeaders | None) -> "CIMultiDict[str]": """Add default headers and transform it to CIMultiDict""" # Convert headers to MultiDict result = CIMultiDict(self._default_headers) if headers: if not isinstance(headers, (MultiDictProxy, MultiDict)): headers = CIMultiDict(headers) - added_names: Set[str] = set() + added_names: set[str] = set() for key, value in headers.items(): if key in added_names: result.add(key, value) @@ -1336,7 +1320,7 @@ def closed(self) -> bool: return self._connector is None or self._connector.closed @property - def connector(self) -> Optional[BaseConnector]: + def connector(self) -> BaseConnector | None: """Connector instance used for the session.""" return self._connector @@ -1346,7 +1330,7 @@ def cookie_jar(self) -> AbstractCookieJar: return self._cookie_jar @property - def version(self) -> Tuple[int, int]: + def version(self) -> tuple[int, int]: """The session HTTP protocol version.""" return self._version @@ -1384,12 +1368,12 @@ def headers(self) -> "CIMultiDict[str]": return self._default_headers @property - def skip_auto_headers(self) -> FrozenSet[istr]: + def skip_auto_headers(self) -> frozenset[istr]: """Headers for which autogeneration should be skipped""" return self._skip_auto_headers @property - def auth(self) -> Optional[BasicAuth]: + def auth(self) -> BasicAuth | None: """An object that represents HTTP Basic Authorization""" return self._default_auth @@ -1406,7 +1390,7 @@ def connector_owner(self) -> bool: @property def raise_for_status( self, - ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]: + ) -> bool | Callable[[ClientResponse], Awaitable[None]]: """Should `ClientResponse.raise_for_status()` be called for each response.""" return self._raise_for_status @@ -1426,7 +1410,7 @@ def trust_env(self) -> bool: return self._trust_env @property - def trace_configs(self) -> List[TraceConfig]: + def trace_configs(self) -> list[TraceConfig]: """A list of TraceConfig instances used for client tracing""" return self._trace_configs @@ -1442,9 +1426,9 @@ def __enter__(self) -> None: def __exit__( self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, ) -> None: # __exit__ should exist in pair with __enter__ but never executed pass # pragma: no cover @@ -1454,9 +1438,9 @@ async def __aenter__(self) -> "ClientSession": async def __aexit__( self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, ) -> None: await self.close() @@ -1466,7 +1450,7 @@ class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType __slots__ = ("_coro", "_resp") def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None: - self._coro: Coroutine["asyncio.Future[Any]", None, _RetType] = coro + self._coro: Coroutine[asyncio.Future[Any], None, _RetType] = coro def send(self, arg: None) -> "asyncio.Future[Any]": return self._coro.send(arg) @@ -1490,9 +1474,9 @@ async def __aenter__(self) -> _RetType: async def __aexit__( self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: 
Optional[TracebackType], + exc_type: type[BaseException] | None, + exc: BaseException | None, + tb: TracebackType | None, ) -> None: await self._resp.__aexit__(exc_type, exc, tb) @@ -1511,7 +1495,7 @@ def __init__( session: ClientSession, ) -> None: self._coro = coro - self._resp: Optional[ClientResponse] = None + self._resp: ClientResponse | None = None self._session = session async def __aenter__(self) -> ClientResponse: @@ -1525,9 +1509,9 @@ async def __aenter__(self) -> ClientResponse: async def __aexit__( self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: Optional[TracebackType], + exc_type: type[BaseException] | None, + exc: BaseException | None, + tb: TracebackType | None, ) -> None: assert self._resp is not None self._resp.close() @@ -1541,8 +1525,8 @@ def request( url: StrOrURL, *, version: HttpVersion = http.HttpVersion11, - connector: Optional[BaseConnector] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, + connector: BaseConnector | None = None, + loop: asyncio.AbstractEventLoop | None = None, **kwargs: Unpack[_RequestOptions], ) -> _SessionRequestContextManager: ... @@ -1553,8 +1537,8 @@ def request( url: StrOrURL, *, version: HttpVersion = http.HttpVersion11, - connector: Optional[BaseConnector] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, + connector: BaseConnector | None = None, + loop: asyncio.AbstractEventLoop | None = None, **kwargs: Any, ) -> _SessionRequestContextManager: """Constructs and sends a request. diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py index 1d298e9a8cf..a371f736ecb 100644 --- a/aiohttp/client_exceptions.py +++ b/aiohttp/client_exceptions.py @@ -2,7 +2,7 @@ import asyncio import warnings -from typing import TYPE_CHECKING, Optional, Tuple, Union +from typing import TYPE_CHECKING, Union from multidict import MultiMapping @@ -75,12 +75,12 @@ class ClientResponseError(ClientError): def __init__( self, request_info: RequestInfo, - history: Tuple[ClientResponse, ...], + history: tuple[ClientResponse, ...], *, - code: Optional[int] = None, - status: Optional[int] = None, + code: int | None = None, + status: int | None = None, message: str = "", - headers: Optional[MultiMapping[str]] = None, + headers: MultiMapping[str] | None = None, ) -> None: self.request_info = request_info if code is not None: @@ -106,11 +106,7 @@ def __init__( self.args = (request_info, history) def __str__(self) -> str: - return "{}, message={!r}, url={!r}".format( - self.status, - self.message, - str(self.request_info.real_url), - ) + return f"{self.status}, message={self.message!r}, url={str(self.request_info.real_url)!r}" def __repr__(self) -> str: args = f"{self.request_info!r}, {self.history!r}" @@ -196,7 +192,7 @@ def host(self) -> str: return self._conn_key.host @property - def port(self) -> Optional[int]: + def port(self) -> int | None: return self._conn_key.port @property @@ -258,7 +254,7 @@ class ServerConnectionError(ClientConnectionError): class ServerDisconnectedError(ServerConnectionError): """Server disconnected.""" - def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None: + def __init__(self, message: RawResponseMessage | str | None = None) -> None: if message is None: message = "Server disconnected" @@ -289,9 +285,7 @@ def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None: self.args = (expected, got, host, port) def __repr__(self) -> str: - return "<{} expected={!r} got={!r} host={!r} port={!r}>".format( - self.__class__.__name__, 
self.expected, self.got, self.host, self.port - ) + return f"<{self.__class__.__name__} expected={self.expected!r} got={self.got!r} host={self.host!r} port={self.port!r}>" class ClientPayloadError(ClientError): @@ -307,7 +301,7 @@ class InvalidURL(ClientError, ValueError): # Derive from ValueError for backward compatibility - def __init__(self, url: StrOrURL, description: Union[str, None] = None) -> None: + def __init__(self, url: StrOrURL, description: str | None = None) -> None: # The type of url is not yarl.URL because the exception can be raised # on URL(url) call self._url = url @@ -402,7 +396,7 @@ def host(self) -> str: return self._conn_key.host @property - def port(self) -> Optional[int]: + def port(self) -> int | None: return self._conn_key.port @property @@ -411,9 +405,9 @@ def ssl(self) -> bool: def __str__(self) -> str: return ( - "Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} " - "[{0.certificate_error.__class__.__name__}: " - "{0.certificate_error.args}]".format(self) + f"Cannot connect to host {self.host}:{self.port} ssl:{self.ssl} " + f"[{self.certificate_error.__class__.__name__}: " + f"{self.certificate_error.args}]" ) diff --git a/aiohttp/client_middleware_digest_auth.py b/aiohttp/client_middleware_digest_auth.py index c1ed7ca0fdd..18d47c96219 100644 --- a/aiohttp/client_middleware_digest_auth.py +++ b/aiohttp/client_middleware_digest_auth.py @@ -11,17 +11,8 @@ import os import re import time -from typing import ( - Callable, - Dict, - Final, - FrozenSet, - List, - Literal, - Tuple, - TypedDict, - Union, -) +from collections.abc import Callable +from typing import Final, Literal, TypedDict from yarl import URL @@ -42,7 +33,7 @@ class DigestAuthChallenge(TypedDict, total=False): stale: str -DigestFunctions: Dict[str, Callable[[bytes], "hashlib._Hash"]] = { +DigestFunctions: dict[str, Callable[[bytes], "hashlib._Hash"]] = { "MD5": hashlib.md5, "MD5-SESS": hashlib.md5, "SHA": hashlib.sha1, @@ -83,7 +74,7 @@ class DigestAuthChallenge(TypedDict, total=False): # RFC 7616: Challenge parameters to extract CHALLENGE_FIELDS: Final[ - Tuple[ + tuple[ Literal["realm", "nonce", "qop", "algorithm", "opaque", "domain", "stale"], ... ] ] = ( @@ -98,14 +89,14 @@ class DigestAuthChallenge(TypedDict, total=False): # Supported digest authentication algorithms # Use a tuple of sorted keys for predictable documentation and error messages -SUPPORTED_ALGORITHMS: Final[Tuple[str, ...]] = tuple(sorted(DigestFunctions.keys())) +SUPPORTED_ALGORITHMS: Final[tuple[str, ...]] = tuple(sorted(DigestFunctions.keys())) # RFC 7616: Fields that require quoting in the Digest auth header # These fields must be enclosed in double quotes in the Authorization header. # Algorithm, qop, and nc are never quoted per RFC specifications. # This frozen set is used by the template-based header construction to # automatically determine which fields need quotes. -QUOTED_AUTH_FIELDS: Final[FrozenSet[str]] = frozenset( +QUOTED_AUTH_FIELDS: Final[frozenset[str]] = frozenset( {"username", "realm", "nonce", "uri", "response", "opaque", "cnonce"} ) @@ -120,7 +111,7 @@ def unescape_quotes(value: str) -> str: return value.replace('\\"', '"') -def parse_header_pairs(header: str) -> Dict[str, str]: +def parse_header_pairs(header: str) -> dict[str, str]: """ Parse key-value pairs from WWW-Authenticate or similar HTTP headers. 
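For context on what `parse_header_pairs` (above) is responsible for: a `WWW-Authenticate` challenge is a comma-separated list of `key=value` fields, some quoted and some bare. A rough sketch of that parsing idea — the regex and the `parse_challenge` name below are illustrative, not aiohttp's actual implementation:

```python
import re

# Simplified illustration; the real parse_header_pairs also copes with
# escaped quotes (see unescape_quotes above) and other edge cases.
_PAIR_RE = re.compile(r'(\w+)\s*=\s*(?:"([^"]*)"|([^\s,]+))')

def parse_challenge(header: str) -> dict[str, str]:
    """Parse 'Digest realm="x", nonce="y", qop=auth' into a dict."""
    return {
        m.group(1): m.group(2) if m.group(2) is not None else m.group(3)
        for m in _PAIR_RE.finditer(header)
    }

assert parse_challenge('Digest realm="test", nonce="abc123", qop=auth') == {
    "realm": "test",
    "nonce": "abc123",
    "qop": "auth",
}
```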
@@ -202,11 +193,9 @@ def __init__( self._challenge: DigestAuthChallenge = {} self._preemptive: bool = preemptive # Set of URLs defining the protection space - self._protection_space: List[str] = [] + self._protection_space: list[str] = [] - async def _encode( - self, method: str, url: URL, body: Union[Payload, Literal[b""]] - ) -> str: + async def _encode(self, method: str, url: URL, body: Payload | Literal[b""]) -> str: """ Build digest authorization header for the current challenge. @@ -358,7 +347,7 @@ def KD(s: bytes, d: bytes) -> bytes: header_fields["cnonce"] = cnonce # Build header using templates for each field type - pairs: List[str] = [] + pairs: list[str] = [] for field, value in header_fields.items(): if field in QUOTED_AUTH_FIELDS: pairs.append(f'{field}="{value}"') diff --git a/aiohttp/client_proto.py b/aiohttp/client_proto.py index e2fb1ce64cb..6eb6ffffed3 100644 --- a/aiohttp/client_proto.py +++ b/aiohttp/client_proto.py @@ -1,6 +1,6 @@ import asyncio from contextlib import suppress -from typing import Any, Optional, Tuple, Union +from typing import Any from .base_protocol import BaseProtocol from .client_exceptions import ( @@ -22,7 +22,7 @@ from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader -class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]): +class ResponseHandler(BaseProtocol, DataQueue[tuple[RawResponseMessage, StreamReader]]): """Helper class to adapt between Protocol and StreamReader.""" def __init__(self, loop: asyncio.AbstractEventLoop) -> None: @@ -31,7 +31,7 @@ def __init__(self, loop: asyncio.AbstractEventLoop) -> None: self._should_close = False - self._payload: Optional[StreamReader] = None + self._payload: StreamReader | None = None self._skip_payload = False self._payload_parser = None @@ -39,18 +39,18 @@ def __init__(self, loop: asyncio.AbstractEventLoop) -> None: self._tail = b"" self._upgraded = False - self._parser: Optional[HttpResponseParser] = None + self._parser: HttpResponseParser | None = None - self._read_timeout: Optional[float] = None - self._read_timeout_handle: Optional[asyncio.TimerHandle] = None + self._read_timeout: float | None = None + self._read_timeout_handle: asyncio.TimerHandle | None = None - self._timeout_ceil_threshold: Optional[float] = 5 + self._timeout_ceil_threshold: float | None = 5 - self._closed: Union[None, asyncio.Future[None]] = None + self._closed: None | asyncio.Future[None] = None self._connection_lost_called = False @property - def closed(self) -> Union[None, asyncio.Future[None]]: + def closed(self) -> None | asyncio.Future[None]: """Future that is set when the connection is closed. 
This property returns a Future that will be completed when the connection @@ -107,7 +107,7 @@ def abort(self) -> None: def is_connected(self) -> bool: return self.transport is not None and not self.transport.is_closing() - def connection_lost(self, exc: Optional[BaseException]) -> None: + def connection_lost(self, exc: BaseException | None) -> None: self._connection_lost_called = True self._drop_timeout() @@ -221,11 +221,11 @@ def set_parser(self, parser: Any, payload: Any) -> None: def set_response_params( self, *, - timer: Optional[BaseTimerContext] = None, + timer: BaseTimerContext | None = None, skip_payload: bool = False, read_until_eof: bool = False, auto_decompress: bool = True, - read_timeout: Optional[float] = None, + read_timeout: float | None = None, read_bufsize: int = 2**16, timeout_ceil_threshold: float = 5, max_line_size: int = 8190, @@ -275,11 +275,11 @@ def start_timeout(self) -> None: self._reschedule_timeout() @property - def read_timeout(self) -> Optional[float]: + def read_timeout(self) -> float | None: return self._read_timeout @read_timeout.setter - def read_timeout(self, read_timeout: Optional[float]) -> None: + def read_timeout(self, read_timeout: float | None) -> None: self._read_timeout = read_timeout def _on_read_timeout(self) -> None: @@ -333,7 +333,7 @@ def data_received(self, data: bytes) -> None: self._upgraded = upgraded - payload: Optional[StreamReader] = None + payload: StreamReader | None = None for message, payload in messages: if message.should_close: self._should_close = True diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 4f5bed9b9c9..43106d217e7 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -7,24 +7,11 @@ import sys import traceback import warnings -from collections.abc import Mapping +from collections.abc import Callable, Iterable, Mapping from hashlib import md5, sha1, sha256 from http.cookies import Morsel, SimpleCookie from types import MappingProxyType, TracebackType -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Iterable, - List, - Literal, - NamedTuple, - Optional, - Tuple, - Type, - Union, -) +from typing import TYPE_CHECKING, Any, Literal, NamedTuple, Optional, Union import attr from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy @@ -118,9 +105,9 @@ def _gen_default_accept_encoding() -> str: @attr.s(auto_attribs=True, frozen=True, slots=True) class ContentDisposition: - type: Optional[str] + type: str | None parameters: "MappingProxyType[str, str]" - filename: Optional[str] + filename: str | None class _RequestInfo(NamedTuple): @@ -137,7 +124,7 @@ def __new__( url: URL, method: str, headers: "CIMultiDictProxy[str]", - real_url: Union[URL, _SENTINEL] = sentinel, + real_url: URL | _SENTINEL = sentinel, ) -> "RequestInfo": """Create a new RequestInfo instance. @@ -188,9 +175,9 @@ def check(self, transport: asyncio.Transport) -> None: def _merge_ssl_params( ssl: Union["SSLContext", bool, Fingerprint], - verify_ssl: Optional[bool], + verify_ssl: bool | None, ssl_context: Optional["SSLContext"], - fingerprint: Optional[bytes], + fingerprint: bytes | None, ) -> Union["SSLContext", bool, Fingerprint]: if ssl is None: ssl = True # Double check for backwards compatibility @@ -236,7 +223,7 @@ def _merge_ssl_params( if not isinstance(ssl, SSL_ALLOWED_TYPES): raise TypeError( "ssl should be SSLContext, bool, Fingerprint or None, " - "got {!r} instead.".format(ssl) + f"got {ssl!r} instead." 
) return ssl @@ -251,12 +238,12 @@ class ConnectionKey(NamedTuple): # the key should contain an information about used proxy / TLS # to prevent reusing wrong connections from a pool host: str - port: Optional[int] + port: int | None is_ssl: bool - ssl: Union[SSLContext, bool, Fingerprint] - proxy: Optional[URL] - proxy_auth: Optional[BasicAuth] - proxy_headers_hash: Optional[int] # hash(CIMultiDict) + ssl: SSLContext | bool | Fingerprint + proxy: URL | None + proxy_auth: BasicAuth | None + proxy_headers_hash: int | None # hash(CIMultiDict) def _is_expected_content_type( @@ -292,21 +279,21 @@ class ClientResponse(HeadersMixin): # but will be set by the start() method. # As the end user will likely never see the None values, we cheat the types below. # from the Status-Line of the response - version: Optional[HttpVersion] = None # HTTP-Version + version: HttpVersion | None = None # HTTP-Version status: int = None # type: ignore[assignment] # Status-Code - reason: Optional[str] = None # Reason-Phrase + reason: str | None = None # Reason-Phrase content: StreamReader = None # type: ignore[assignment] # Payload stream - _body: Optional[bytes] = None + _body: bytes | None = None _headers: CIMultiDictProxy[str] = None # type: ignore[assignment] - _history: Tuple["ClientResponse", ...] = () + _history: tuple["ClientResponse", ...] = () _raw_headers: RawHeaders = None # type: ignore[assignment] _connection: Optional["Connection"] = None # current connection - _cookies: Optional[SimpleCookie] = None - _raw_cookie_headers: Optional[Tuple[str, ...]] = None + _cookies: SimpleCookie | None = None + _raw_cookie_headers: tuple[str, ...] | None = None _continue: Optional["asyncio.Future[bool]"] = None - _source_traceback: Optional[traceback.StackSummary] = None + _source_traceback: traceback.StackSummary | None = None _session: Optional["ClientSession"] = None # set up by ClientRequest after ClientResponse object creation # post-init stage allows to not change ctor signature @@ -323,11 +310,11 @@ def __init__( method: str, url: URL, *, - writer: "Optional[asyncio.Task[None]]", + writer: "asyncio.Task[None] | None", continue100: Optional["asyncio.Future[bool]"], timer: BaseTimerContext, request_info: RequestInfo, - traces: List["Trace"], + traces: list["Trace"], loop: asyncio.AbstractEventLoop, session: "ClientSession", ) -> None: @@ -344,7 +331,7 @@ def __init__( self._continue = continue100 self._request_info = request_info self._timer = timer if timer is not None else TimerNoop() - self._cache: Dict[str, Any] = {} + self._cache: dict[str, Any] = {} self._traces = traces self._loop = loop # Save reference to _resolve_charset, so that get_encoding() will still @@ -439,7 +426,7 @@ def request_info(self) -> RequestInfo: return self._request_info @reify - def content_disposition(self) -> Optional[ContentDisposition]: + def content_disposition(self) -> ContentDisposition | None: raw = self._headers.get(hdrs.CONTENT_DISPOSITION) if raw is None: return None @@ -474,9 +461,7 @@ def __repr__(self) -> str: else: ascii_encodable_reason = "None" print( - "".format( - ascii_encodable_url, self.status, ascii_encodable_reason - ), + f"", file=out, ) print(self.headers, file=out) @@ -487,18 +472,18 @@ def connection(self) -> Optional["Connection"]: return self._connection @reify - def history(self) -> Tuple["ClientResponse", ...]: + def history(self) -> tuple["ClientResponse", ...]: """A sequence of of responses, if redirects occurred.""" return self._history @reify - def links(self) -> 
"MultiDictProxy[MultiDictProxy[Union[str, URL]]]": + def links(self) -> "MultiDictProxy[MultiDictProxy[str | URL]]": links_str = ", ".join(self.headers.getall("link", [])) if not links_str: return MultiDictProxy(MultiDict()) - links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict() + links: MultiDict[MultiDictProxy[str | URL]] = MultiDict() for val in re.split(r",(?=\s*<)", links_str): match = re.match(r"\s*<(.*)>(.*)", val) @@ -508,7 +493,7 @@ def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]": url, params_str = match.groups() params = params_str.split(";")[1:] - link: MultiDict[Union[str, URL]] = MultiDict() + link: MultiDict[str | URL] = MultiDict() for param in params: match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M) @@ -731,7 +716,7 @@ def get_encoding(self) -> str: return self._resolve_charset(self, self._body) - async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str: + async def text(self, encoding: str | None = None, errors: str = "strict") -> str: """Read response payload and decode.""" if self._body is None: await self.read() @@ -744,9 +729,9 @@ async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> async def json( self, *, - encoding: Optional[str] = None, + encoding: str | None = None, loads: JSONDecoder = DEFAULT_JSON_DECODER, - content_type: Optional[str] = "application/json", + content_type: str | None = "application/json", ) -> Any: """Read and decodes JSON response.""" if self._body is None: @@ -780,9 +765,9 @@ async def __aenter__(self) -> "ClientResponse": async def __aexit__( self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, ) -> None: self._in_context = False # similar to _RequestContextManager, we do not need to check @@ -808,7 +793,7 @@ class ClientRequest: } # Type of body depends on PAYLOAD_REGISTRY, which is dynamic. 
- _body: Union[None, payload.Payload] = None + _body: None | payload.Payload = None auth = None response = None @@ -834,26 +819,26 @@ def __init__( url: URL, *, params: Query = None, - headers: Optional[LooseHeaders] = None, - skip_auto_headers: Optional[Iterable[str]] = None, + headers: LooseHeaders | None = None, + skip_auto_headers: Iterable[str] | None = None, data: Any = None, - cookies: Optional[LooseCookies] = None, - auth: Optional[BasicAuth] = None, + cookies: LooseCookies | None = None, + auth: BasicAuth | None = None, version: http.HttpVersion = http.HttpVersion11, - compress: Union[str, bool, None] = None, - chunked: Optional[bool] = None, + compress: str | bool | None = None, + chunked: bool | None = None, expect100: bool = False, - loop: Optional[asyncio.AbstractEventLoop] = None, - response_class: Optional[Type["ClientResponse"]] = None, - proxy: Optional[URL] = None, - proxy_auth: Optional[BasicAuth] = None, - timer: Optional[BaseTimerContext] = None, + loop: asyncio.AbstractEventLoop | None = None, + response_class: type["ClientResponse"] | None = None, + proxy: URL | None = None, + proxy_auth: BasicAuth | None = None, + timer: BaseTimerContext | None = None, session: Optional["ClientSession"] = None, - ssl: Union[SSLContext, bool, Fingerprint] = True, - proxy_headers: Optional[LooseHeaders] = None, - traces: Optional[List["Trace"]] = None, + ssl: SSLContext | bool | Fingerprint = True, + proxy_headers: LooseHeaders | None = None, + traces: list["Trace"] | None = None, trust_env: bool = False, - server_hostname: Optional[str] = None, + server_hostname: str | None = None, ): if loop is None: loop = asyncio.get_event_loop() @@ -884,7 +869,7 @@ def __init__( real_response_class = ClientResponse else: real_response_class = response_class - self.response_class: Type[ClientResponse] = real_response_class + self.response_class: type[ClientResponse] = real_response_class self._timer = timer if timer is not None else TimerNoop() self._ssl = ssl if ssl is not None else True self.server_hostname = server_hostname @@ -910,7 +895,7 @@ def __init__( def __reset_writer(self, _: object = None) -> None: self.__writer = None - def _get_content_length(self) -> Optional[int]: + def _get_content_length(self) -> int | None: """Extract and validate Content-Length header value. Returns parsed Content-Length value or None if not set. @@ -952,7 +937,7 @@ def ssl(self) -> Union["SSLContext", bool, Fingerprint]: @property def connection_key(self) -> ConnectionKey: if proxy_headers := self.proxy_headers: - h: Optional[int] = hash(tuple(proxy_headers.items())) + h: int | None = hash(tuple(proxy_headers.items())) else: h = None url = self.url @@ -976,11 +961,11 @@ def host(self) -> str: return ret @property - def port(self) -> Optional[int]: + def port(self) -> int | None: return self.url.port @property - def body(self) -> Union[payload.Payload, Literal[b""]]: + def body(self) -> payload.Payload | Literal[b""]: """Request body.""" # empty body is represented as bytes for backwards compatibility return self._body or b"" @@ -1040,7 +1025,7 @@ def update_host(self, url: URL) -> None: if url.raw_user or url.raw_password: self.auth = helpers.BasicAuth(url.user or "", url.password or "") - def update_version(self, version: Union[http.HttpVersion, str]) -> None: + def update_version(self, version: http.HttpVersion | str) -> None: """Convert request version to two elements tuple. 
parser HTTP version '1.1' => (1, 1) @@ -1055,7 +1040,7 @@ def update_version(self, version: Union[http.HttpVersion, str]) -> None: ) from None self.version = version - def update_headers(self, headers: Optional[LooseHeaders]) -> None: + def update_headers(self, headers: LooseHeaders | None) -> None: """Update request headers.""" self.headers: CIMultiDict[str] = CIMultiDict() @@ -1080,7 +1065,7 @@ def update_headers(self, headers: Optional[LooseHeaders]) -> None: else: self.headers.add(key, value) - def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> None: + def update_auto_headers(self, skip_auto_headers: Iterable[str] | None) -> None: if skip_auto_headers is not None: self._skip_auto_headers = CIMultiDict( (hdr, None) for hdr in sorted(skip_auto_headers) @@ -1099,7 +1084,7 @@ def update_auto_headers(self, skip_auto_headers: Optional[Iterable[str]]) -> Non if hdrs.USER_AGENT not in used_headers: self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE - def update_cookies(self, cookies: Optional[LooseCookies]) -> None: + def update_cookies(self, cookies: LooseCookies | None) -> None: """Update request cookies header.""" if not cookies: return @@ -1160,7 +1145,7 @@ def update_transfer_encoding(self) -> None: self.headers[hdrs.TRANSFER_ENCODING] = "chunked" - def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None: + def update_auth(self, auth: BasicAuth | None, trust_env: bool = False) -> None: """Set basic auth.""" if auth is None: auth = self.auth @@ -1311,9 +1296,9 @@ def update_expect_continue(self, expect: bool = False) -> None: def update_proxy( self, - proxy: Optional[URL], - proxy_auth: Optional[BasicAuth], - proxy_headers: Optional[LooseHeaders], + proxy: URL | None, + proxy_auth: BasicAuth | None, + proxy_headers: LooseHeaders | None, ) -> None: self.proxy = proxy if proxy is None: @@ -1335,7 +1320,7 @@ async def write_bytes( self, writer: AbstractStreamWriter, conn: "Connection", - content_length: Optional[int] = None, + content_length: int | None = None, ) -> None: """ Write the request body to the connection stream. 
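Two behaviours touched above are easy to check from the public API: `update_version()` accepts either an `HttpVersion` or a `"major.minor"` string and splits the string form into a two-element tuple, while `update_auth()` ultimately installs a standard Basic header. A small sketch (the assertion values are computed by hand, so treat them as illustrative):

```python
from aiohttp import BasicAuth, HttpVersion

# "1.1" -> (1, 1), mirroring the update_version() docstring above
assert HttpVersion(*map(int, "1.1".split("."))) == HttpVersion(1, 1)

# BasicAuth.encode() yields the Authorization value update_auth() installs
assert BasicAuth("user", "pass").encode() == "Basic dXNlcjpwYXNz"
```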
@@ -1475,7 +1460,7 @@ async def send(self, conn: "Connection") -> "ClientResponse": # Buffer headers for potential coalescing with body await writer.write_headers(status_line, self.headers) - task: Optional["asyncio.Task[None]"] + task: asyncio.Task[None] | None if self._body or self._continue is not None or protocol.writing_paused: coro = self.write_bytes(writer, conn, self._get_content_length()) if sys.version_info >= (3, 12): diff --git a/aiohttp/client_ws.py b/aiohttp/client_ws.py index daa57d1930b..25b04cf12d6 100644 --- a/aiohttp/client_ws.py +++ b/aiohttp/client_ws.py @@ -3,7 +3,7 @@ import asyncio import sys from types import TracebackType -from typing import Any, Optional, Type, cast +from typing import Any, Optional, cast import attr @@ -48,14 +48,14 @@ def __init__( self, reader: WebSocketDataQueue, writer: WebSocketWriter, - protocol: Optional[str], + protocol: str | None, response: ClientResponse, timeout: ClientWSTimeout, autoclose: bool, autoping: bool, loop: asyncio.AbstractEventLoop, *, - heartbeat: Optional[float] = None, + heartbeat: float | None = None, compress: int = 0, client_notakeover: bool = False, ) -> None: @@ -67,23 +67,23 @@ def __init__( self._protocol = protocol self._closed = False self._closing = False - self._close_code: Optional[int] = None + self._close_code: int | None = None self._timeout = timeout self._autoclose = autoclose self._autoping = autoping self._heartbeat = heartbeat - self._heartbeat_cb: Optional[asyncio.TimerHandle] = None + self._heartbeat_cb: asyncio.TimerHandle | None = None self._heartbeat_when: float = 0.0 if heartbeat is not None: self._pong_heartbeat = heartbeat / 2.0 - self._pong_response_cb: Optional[asyncio.TimerHandle] = None + self._pong_response_cb: asyncio.TimerHandle | None = None self._loop = loop self._waiting: bool = False - self._close_wait: Optional[asyncio.Future[None]] = None - self._exception: Optional[BaseException] = None + self._close_wait: asyncio.Future[None] | None = None + self._exception: BaseException | None = None self._compress = compress self._client_notakeover = client_notakeover - self._ping_task: Optional[asyncio.Task[None]] = None + self._ping_task: asyncio.Task[None] | None = None self._reset_heartbeat() @@ -199,11 +199,11 @@ def closed(self) -> bool: return self._closed @property - def close_code(self) -> Optional[int]: + def close_code(self) -> int | None: return self._close_code @property - def protocol(self) -> Optional[str]: + def protocol(self) -> str | None: return self._protocol @property @@ -224,7 +224,7 @@ def get_extra_info(self, name: str, default: Any = None) -> Any: return default return transport.get_extra_info(name, default) - def exception(self) -> Optional[BaseException]: + def exception(self) -> BaseException | None: return self._exception async def ping(self, message: bytes = b"") -> None: @@ -234,19 +234,19 @@ async def pong(self, message: bytes = b"") -> None: await self._writer.send_frame(message, WSMsgType.PONG) async def send_frame( - self, message: bytes, opcode: WSMsgType, compress: Optional[int] = None + self, message: bytes, opcode: WSMsgType, compress: int | None = None ) -> None: """Send a frame over the websocket.""" await self._writer.send_frame(message, opcode, compress) - async def send_str(self, data: str, compress: Optional[int] = None) -> None: + async def send_str(self, data: str, compress: int | None = None) -> None: if not isinstance(data, str): raise TypeError("data argument must be str (%r)" % type(data)) await self._writer.send_frame( 
data.encode("utf-8"), WSMsgType.TEXT, compress=compress ) - async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None: + async def send_bytes(self, data: bytes, compress: int | None = None) -> None: if not isinstance(data, (bytes, bytearray, memoryview)): raise TypeError("data argument must be byte-ish (%r)" % type(data)) await self._writer.send_frame(data, WSMsgType.BINARY, compress=compress) @@ -254,7 +254,7 @@ async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None: async def send_json( self, data: Any, - compress: Optional[int] = None, + compress: int | None = None, *, dumps: JSONEncoder = DEFAULT_JSON_ENCODER, ) -> None: @@ -309,7 +309,7 @@ async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bo self._response.close() return True - async def receive(self, timeout: Optional[float] = None) -> WSMessage: + async def receive(self, timeout: float | None = None) -> WSMessage: receive_timeout = timeout or self._timeout.ws_receive while True: @@ -382,7 +382,7 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: return msg - async def receive_str(self, *, timeout: Optional[float] = None) -> str: + async def receive_str(self, *, timeout: float | None = None) -> str: msg = await self.receive(timeout) if msg.type is not WSMsgType.TEXT: raise WSMessageTypeError( @@ -390,7 +390,7 @@ async def receive_str(self, *, timeout: Optional[float] = None) -> str: ) return cast(str, msg.data) - async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: + async def receive_bytes(self, *, timeout: float | None = None) -> bytes: msg = await self.receive(timeout) if msg.type is not WSMsgType.BINARY: raise WSMessageTypeError( @@ -402,7 +402,7 @@ async def receive_json( self, *, loads: JSONDecoder = DEFAULT_JSON_DECODER, - timeout: Optional[float] = None, + timeout: float | None = None, ) -> Any: data = await self.receive_str(timeout=timeout) return loads(data) @@ -421,8 +421,8 @@ async def __aenter__(self) -> "ClientWebSocketResponse": async def __aexit__( self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, ) -> None: await self.close() diff --git a/aiohttp/compression_utils.py b/aiohttp/compression_utils.py index cdede4244b4..18739ed70fc 100644 --- a/aiohttp/compression_utils.py +++ b/aiohttp/compression_utils.py @@ -2,7 +2,7 @@ import sys import zlib from concurrent.futures import Executor -from typing import Any, Final, Optional, Protocol, TypedDict, cast +from typing import Any, Final, Protocol, TypedDict, cast if sys.version_info >= (3, 12): from collections.abc import Buffer @@ -63,7 +63,7 @@ def compressobj( wbits: int = ..., memLevel: int = ..., strategy: int = ..., - zdict: Optional[Buffer] = ..., + zdict: Buffer | None = ..., ) -> ZLibCompressObjProtocol: ... def decompressobj( self, wbits: int = ..., zdict: Buffer = ... 
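The ZLib handlers in this file select between gzip and deflate framing through zlib "wbits" modes; the `encoding_to_mode()` hunk just below maps an HTTP content encoding onto one of them. In stdlib terms (a self-contained sketch, independent of aiohttp):

```python
import zlib

# gzip framing: 16 + MAX_WBITS; raw deflate with no zlib header: -MAX_WBITS
gzip_comp = zlib.compressobj(wbits=16 + zlib.MAX_WBITS)
raw_comp = zlib.compressobj(wbits=-zlib.MAX_WBITS)

payload = b"hello world" * 10
gz = gzip_comp.compress(payload) + gzip_comp.flush()
raw = raw_comp.compress(payload) + raw_comp.flush()

assert gz.startswith(b"\x1f\x8b")  # gzip magic bytes
assert zlib.decompress(raw, wbits=-zlib.MAX_WBITS) == payload
```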
@@ -136,7 +136,7 @@ def set_zlib_backend(new_zlib_backend: ZLibBackendProtocol) -> None: def encoding_to_mode( - encoding: Optional[str] = None, + encoding: str | None = None, suppress_deflate_header: bool = False, ) -> int: if encoding == "gzip": @@ -149,8 +149,8 @@ class ZlibBaseHandler: def __init__( self, mode: int, - executor: Optional[Executor] = None, - max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, + executor: Executor | None = None, + max_sync_chunk_size: int | None = MAX_SYNC_CHUNK_SIZE, ): self._mode = mode self._executor = executor @@ -160,13 +160,13 @@ def __init__( class ZLibCompressor(ZlibBaseHandler): def __init__( self, - encoding: Optional[str] = None, + encoding: str | None = None, suppress_deflate_header: bool = False, - level: Optional[int] = None, - wbits: Optional[int] = None, - strategy: Optional[int] = None, - executor: Optional[Executor] = None, - max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, + level: int | None = None, + wbits: int | None = None, + strategy: int | None = None, + executor: Executor | None = None, + max_sync_chunk_size: int | None = MAX_SYNC_CHUNK_SIZE, ): super().__init__( mode=( @@ -214,7 +214,7 @@ async def compress(self, data: bytes) -> bytes: ) return self.compress_sync(data) - def flush(self, mode: Optional[int] = None) -> bytes: + def flush(self, mode: int | None = None) -> bytes: return self._compressor.flush( mode if mode is not None else self._zlib_backend.Z_FINISH ) @@ -223,10 +223,10 @@ def flush(self, mode: Optional[int] = None) -> bytes: class ZLibDecompressor(ZlibBaseHandler): def __init__( self, - encoding: Optional[str] = None, + encoding: str | None = None, suppress_deflate_header: bool = False, - executor: Optional[Executor] = None, - max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE, + executor: Executor | None = None, + max_sync_chunk_size: int | None = MAX_SYNC_CHUNK_SIZE, ): super().__init__( mode=encoding_to_mode(encoding, suppress_deflate_header), diff --git a/aiohttp/connector.py b/aiohttp/connector.py index 0fbacde3b42..abca2605601 100644 --- a/aiohttp/connector.py +++ b/aiohttp/connector.py @@ -6,30 +6,13 @@ import traceback import warnings from collections import OrderedDict, defaultdict, deque +from collections.abc import Awaitable, Callable, Iterator, Sequence from contextlib import suppress from http import HTTPStatus from itertools import chain, cycle, islice from time import monotonic from types import TracebackType -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - DefaultDict, - Deque, - Dict, - Iterator, - List, - Literal, - Optional, - Sequence, - Set, - Tuple, - Type, - Union, - cast, -) +from typing import TYPE_CHECKING, Any, Literal, Optional, Union, cast import aiohappyeyeballs from aiohappyeyeballs import AddrInfoType, SocketFactoryType @@ -133,7 +116,7 @@ def __del__(self) -> None: ) -async def _wait_for_close(waiters: List[Awaitable[object]]) -> None: +async def _wait_for_close(waiters: list[Awaitable[object]]) -> None: """Wait for all waiters to finish closing.""" results = await asyncio.gather(*waiters, return_exceptions=True) for res in results: @@ -155,8 +138,8 @@ def __init__( self._key = key self._connector = connector self._loop = loop - self._protocol: Optional[ResponseHandler] = protocol - self._callbacks: List[Callable[[], None]] = [] + self._protocol: ResponseHandler | None = protocol + self._callbacks: list[Callable[[], None]] = [] if loop.get_debug(): self._source_traceback = traceback.extract_stack(sys._getframe(1)) @@ -190,13 +173,13 @@ 
def loop(self) -> asyncio.AbstractEventLoop: return self._loop @property - def transport(self) -> Optional[asyncio.Transport]: + def transport(self) -> asyncio.Transport | None: if self._protocol is None: return None return self._protocol.transport @property - def protocol(self) -> Optional[ResponseHandler]: + def protocol(self) -> ResponseHandler | None: return self._protocol def add_callback(self, callback: Callable[[], None]) -> None: @@ -254,7 +237,7 @@ class _TransportPlaceholder: __slots__ = ("closed", "transport") - def __init__(self, closed_future: asyncio.Future[Optional[Exception]]) -> None: + def __init__(self, closed_future: asyncio.Future[Exception | None]) -> None: """Initialize a placeholder for a transport.""" self.closed = closed_future self.transport = None @@ -292,12 +275,12 @@ class BaseConnector: def __init__( self, *, - keepalive_timeout: Union[object, None, float] = sentinel, + keepalive_timeout: object | None | float = sentinel, force_close: bool = False, limit: int = 100, limit_per_host: int = 0, enable_cleanup_closed: bool = False, - loop: Optional[asyncio.AbstractEventLoop] = None, + loop: asyncio.AbstractEventLoop | None = None, timeout_ceil_threshold: float = 5, ) -> None: @@ -320,13 +303,13 @@ def __init__( # Connection pool of reusable connections. # We use a deque to store connections because it has O(1) popleft() # and O(1) append() operations to implement a FIFO queue. - self._conns: DefaultDict[ - ConnectionKey, Deque[Tuple[ResponseHandler, float]] + self._conns: defaultdict[ + ConnectionKey, deque[tuple[ResponseHandler, float]] ] = defaultdict(deque) self._limit = limit self._limit_per_host = limit_per_host - self._acquired: Set[ResponseHandler] = set() - self._acquired_per_host: DefaultDict[ConnectionKey, Set[ResponseHandler]] = ( + self._acquired: set[ResponseHandler] = set() + self._acquired_per_host: defaultdict[ConnectionKey, set[ResponseHandler]] = ( defaultdict(set) ) self._keepalive_timeout = cast(float, keepalive_timeout) @@ -335,7 +318,7 @@ def __init__( # {host_key: FIFO list of waiters} # The FIFO is implemented with an OrderedDict with None keys because # python does not have an ordered set. 
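The waiters comment above deserves a concrete illustration: an `OrderedDict` whose values are all `None` behaves as an insertion-ordered set, so the oldest waiter is woken first while a waiter cancelled mid-queue can still be removed in O(1). A minimal sketch:

```python
import asyncio
from collections import OrderedDict

loop = asyncio.new_event_loop()
waiters: "OrderedDict[asyncio.Future[None], None]" = OrderedDict()

first, second = loop.create_future(), loop.create_future()
waiters[first] = None   # append, like a FIFO queue
waiters[second] = None
del waiters[second]     # drop a cancelled waiter from the middle in O(1)
assert next(iter(waiters)) is first  # oldest waiter is first in line
loop.close()
```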
- self._waiters: DefaultDict[ + self._waiters: defaultdict[ ConnectionKey, OrderedDict[asyncio.Future[None], None] ] = defaultdict(OrderedDict) @@ -343,10 +326,10 @@ def __init__( self._factory = functools.partial(ResponseHandler, loop=loop) # start keep-alive connection cleanup task - self._cleanup_handle: Optional[asyncio.TimerHandle] = None + self._cleanup_handle: asyncio.TimerHandle | None = None # start cleanup closed transports task - self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None + self._cleanup_closed_handle: asyncio.TimerHandle | None = None if enable_cleanup_closed and not NEEDS_CLEANUP_CLOSED: warnings.warn( @@ -359,8 +342,8 @@ def __init__( enable_cleanup_closed = False self._cleanup_closed_disabled = not enable_cleanup_closed - self._cleanup_closed_transports: List[Optional[asyncio.Transport]] = [] - self._placeholder_future: asyncio.Future[Optional[Exception]] = ( + self._cleanup_closed_transports: list[asyncio.Transport | None] = [] + self._placeholder_future: asyncio.Future[Exception | None] = ( loop.create_future() ) self._placeholder_future.set_result(None) @@ -403,9 +386,9 @@ async def __aenter__(self) -> "BaseConnector": async def __aexit__( self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - exc_traceback: Optional[TracebackType] = None, + exc_type: type[BaseException] | None = None, + exc_value: BaseException | None = None, + exc_traceback: TracebackType | None = None, ) -> None: await self.close() @@ -447,7 +430,7 @@ def _cleanup(self) -> None: connections = defaultdict(deque) deadline = now - timeout for key, conns in self._conns.items(): - alive: Deque[Tuple[ResponseHandler, float]] = deque() + alive: deque[tuple[ResponseHandler, float]] = deque() for proto, use_time in conns: if proto.is_connected() and use_time - deadline >= 0: alive.append((proto, use_time)) @@ -514,8 +497,8 @@ def close(self, *, abort_ssl: bool = False) -> Awaitable[None]: task = self._loop.create_task(coro) return _DeprecationWaiter(task) - def _close(self, *, abort_ssl: bool = False) -> List[Awaitable[object]]: - waiters: List[Awaitable[object]] = [] + def _close(self, *, abort_ssl: bool = False) -> list[Awaitable[object]]: + waiters: list[Awaitable[object]] = [] if self._closed: return waiters @@ -610,7 +593,7 @@ def _available_connections(self, key: "ConnectionKey") -> int: return total_remain async def connect( - self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" + self, req: ClientRequest, traces: list["Trace"], timeout: "ClientTimeout" ) -> Connection: """Get from pool or create new connection.""" key = req.connection_key @@ -665,7 +648,7 @@ async def connect( return Connection(self, key, proto, self._loop) async def _wait_for_available_connection( - self, key: "ConnectionKey", traces: List["Trace"] + self, key: "ConnectionKey", traces: list["Trace"] ) -> None: """Wait for an available connection slot.""" # We loop here because there is a race between @@ -707,8 +690,8 @@ async def _wait_for_available_connection( attempts += 1 async def _get( - self, key: "ConnectionKey", traces: List["Trace"] - ) -> Optional[Connection]: + self, key: "ConnectionKey", traces: list["Trace"] + ) -> Connection | None: """Get next reusable connection for the key or None. The connection will be marked as acquired. 
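`_cleanup()` above walks each per-key deque of `(protocol, last_use_time)` pairs and keeps only connections still within the keep-alive window. A hypothetical, standalone sketch of that pruning pass (the names here are not aiohttp's):

```python
from collections import defaultdict, deque
from typing import Any

pool: defaultdict[str, deque[tuple[Any, float]]] = defaultdict(deque)
KEEPALIVE_TIMEOUT = 15.0

def cleanup(now: float) -> None:
    deadline = now - KEEPALIVE_TIMEOUT
    for key in list(pool):
        # same "use_time - deadline >= 0" liveness test as in the diff above
        alive = deque(item for item in pool[key] if item[1] - deadline >= 0)
        if alive:
            pool[key] = alive
        else:
            del pool[key]
```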
@@ -820,27 +803,27 @@ def _release( ) async def _create_connection( - self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" + self, req: ClientRequest, traces: list["Trace"], timeout: "ClientTimeout" ) -> ResponseHandler: raise NotImplementedError() class _DNSCacheTable: - def __init__(self, ttl: Optional[float] = None) -> None: - self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[ResolveResult], int]] = {} - self._timestamps: Dict[Tuple[str, int], float] = {} + def __init__(self, ttl: float | None = None) -> None: + self._addrs_rr: dict[tuple[str, int], tuple[Iterator[ResolveResult], int]] = {} + self._timestamps: dict[tuple[str, int], float] = {} self._ttl = ttl def __contains__(self, host: object) -> bool: return host in self._addrs_rr - def add(self, key: Tuple[str, int], addrs: List[ResolveResult]) -> None: + def add(self, key: tuple[str, int], addrs: list[ResolveResult]) -> None: self._addrs_rr[key] = (cycle(addrs), len(addrs)) if self._ttl is not None: self._timestamps[key] = monotonic() - def remove(self, key: Tuple[str, int]) -> None: + def remove(self, key: tuple[str, int]) -> None: self._addrs_rr.pop(key, None) if self._ttl is not None: @@ -850,14 +833,14 @@ def clear(self) -> None: self._addrs_rr.clear() self._timestamps.clear() - def next_addrs(self, key: Tuple[str, int]) -> List[ResolveResult]: + def next_addrs(self, key: tuple[str, int]) -> list[ResolveResult]: loop, length = self._addrs_rr[key] addrs = list(islice(loop, length)) # Consume one more element to shift internal state of `cycle` next(loop) return addrs - def expired(self, key: Tuple[str, int]) -> bool: + def expired(self, key: tuple[str, int]) -> bool: if self._ttl is None: return False @@ -940,25 +923,25 @@ def __init__( self, *, verify_ssl: bool = True, - fingerprint: Optional[bytes] = None, + fingerprint: bytes | None = None, use_dns_cache: bool = True, - ttl_dns_cache: Optional[int] = 10, + ttl_dns_cache: int | None = 10, family: socket.AddressFamily = socket.AddressFamily.AF_UNSPEC, - ssl_context: Optional[SSLContext] = None, - ssl: Union[bool, Fingerprint, SSLContext] = True, - local_addr: Optional[Tuple[str, int]] = None, - resolver: Optional[AbstractResolver] = None, - keepalive_timeout: Union[None, float, object] = sentinel, + ssl_context: SSLContext | None = None, + ssl: bool | Fingerprint | SSLContext = True, + local_addr: tuple[str, int] | None = None, + resolver: AbstractResolver | None = None, + keepalive_timeout: None | float | object = sentinel, force_close: bool = False, limit: int = 100, limit_per_host: int = 0, enable_cleanup_closed: bool = False, - loop: Optional[asyncio.AbstractEventLoop] = None, + loop: asyncio.AbstractEventLoop | None = None, timeout_ceil_threshold: float = 5, - happy_eyeballs_delay: Optional[float] = 0.25, - interleave: Optional[int] = None, - socket_factory: Optional[SocketFactoryType] = None, - ssl_shutdown_timeout: Union[_SENTINEL, None, float] = sentinel, + happy_eyeballs_delay: float | None = 0.25, + interleave: int | None = None, + socket_factory: SocketFactoryType | None = None, + ssl_shutdown_timeout: _SENTINEL | None | float = sentinel, ): super().__init__( keepalive_timeout=keepalive_timeout, @@ -982,16 +965,16 @@ def __init__( self._use_dns_cache = use_dns_cache self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache) - self._throttle_dns_futures: Dict[ - Tuple[str, int], Set["asyncio.Future[None]"] - ] = {} + self._throttle_dns_futures: dict[tuple[str, int], set[asyncio.Future[None]]] = ( + {} + ) self._family = family self._local_addr_infos 
= aiohappyeyeballs.addr_to_addr_infos(local_addr) self._happy_eyeballs_delay = happy_eyeballs_delay self._interleave = interleave - self._resolve_host_tasks: Set["asyncio.Task[List[ResolveResult]]"] = set() + self._resolve_host_tasks: set[asyncio.Task[list[ResolveResult]]] = set() self._socket_factory = socket_factory - self._ssl_shutdown_timeout: Optional[float] + self._ssl_shutdown_timeout: float | None # Handle ssl_shutdown_timeout with warning for Python < 3.11 if ssl_shutdown_timeout is sentinel: self._ssl_shutdown_timeout = 0 @@ -1015,7 +998,7 @@ def __init__( ) self._ssl_shutdown_timeout = ssl_shutdown_timeout - def _close(self, *, abort_ssl: bool = False) -> List[Awaitable[object]]: + def _close(self, *, abort_ssl: bool = False) -> list[Awaitable[object]]: """Close all ongoing DNS calls.""" for fut in chain.from_iterable(self._throttle_dns_futures.values()): fut.cancel() @@ -1053,9 +1036,7 @@ def use_dns_cache(self) -> bool: """True if local DNS caching is enabled.""" return self._use_dns_cache - def clear_dns_cache( - self, host: Optional[str] = None, port: Optional[int] = None - ) -> None: + def clear_dns_cache(self, host: str | None = None, port: int | None = None) -> None: """Remove specified host/port or clear all dns local cache.""" if host is not None and port is not None: self._cached_hosts.remove((host, port)) @@ -1065,8 +1046,8 @@ def clear_dns_cache( self._cached_hosts.clear() async def _resolve_host( - self, host: str, port: int, traces: Optional[Sequence["Trace"]] = None - ) -> List[ResolveResult]: + self, host: str, port: int, traces: Sequence["Trace"] | None = None + ) -> list[ResolveResult]: """Resolve host and return list of addresses.""" if is_ip_address(host): return [ @@ -1104,7 +1085,7 @@ async def _resolve_host( await trace.send_dns_cache_hit(host) return result - futures: Set["asyncio.Future[None]"] + futures: set[asyncio.Future[None]] # # If multiple connectors are resolving the same host, we wait # for the first one to resolve and then use the result for all of them. @@ -1148,7 +1129,7 @@ async def _resolve_host( return await asyncio.shield(resolved_host_task) except asyncio.CancelledError: - def drop_exception(fut: "asyncio.Future[List[ResolveResult]]") -> None: + def drop_exception(fut: "asyncio.Future[list[ResolveResult]]") -> None: with suppress(Exception, asyncio.CancelledError): fut.result() @@ -1157,12 +1138,12 @@ def drop_exception(fut: "asyncio.Future[List[ResolveResult]]") -> None: async def _resolve_host_with_throttle( self, - key: Tuple[str, int], + key: tuple[str, int], host: str, port: int, - futures: Set["asyncio.Future[None]"], - traces: Optional[Sequence["Trace"]], - ) -> List[ResolveResult]: + futures: set["asyncio.Future[None]"], + traces: Sequence["Trace"] | None, + ) -> list[ResolveResult]: """Resolve host and set result for all waiters. This method must be run in a task and shielded from cancellation @@ -1197,7 +1178,7 @@ async def _resolve_host_with_throttle( return self._cached_hosts.next_addrs(key) async def _create_connection( - self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" + self, req: ClientRequest, traces: list["Trace"], timeout: "ClientTimeout" ) -> ResponseHandler: """Create connection. @@ -1210,7 +1191,7 @@ async def _create_connection( return proto - def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]: + def _get_ssl_context(self, req: ClientRequest) -> SSLContext | None: """Logic to get the correct SSL context 0. 
if req.ssl is false, return None @@ -1255,12 +1236,12 @@ def _get_fingerprint(self, req: ClientRequest) -> Optional["Fingerprint"]: async def _wrap_create_connection( self, *args: Any, - addr_infos: List[AddrInfoType], + addr_infos: list[AddrInfoType], req: ClientRequest, timeout: "ClientTimeout", - client_error: Type[Exception] = ClientConnectorError, + client_error: type[Exception] = ClientConnectorError, **kwargs: Any, - ) -> Tuple[asyncio.Transport, ResponseHandler]: + ) -> tuple[asyncio.Transport, ResponseHandler]: try: async with ceil_timeout( timeout.sock_connect, ceil_threshold=timeout.ceil_threshold @@ -1295,9 +1276,9 @@ async def _wrap_existing_connection( *args: Any, req: ClientRequest, timeout: "ClientTimeout", - client_error: Type[Exception] = ClientConnectorError, + client_error: type[Exception] = ClientConnectorError, **kwargs: Any, - ) -> Tuple[asyncio.Transport, ResponseHandler]: + ) -> tuple[asyncio.Transport, ResponseHandler]: try: async with ceil_timeout( timeout.sock_connect, ceil_threshold=timeout.ceil_threshold @@ -1403,8 +1384,8 @@ async def _start_tls_connection( underlying_transport: asyncio.Transport, req: ClientRequest, timeout: "ClientTimeout", - client_error: Type[Exception] = ClientConnectorError, - ) -> Tuple[asyncio.BaseTransport, ResponseHandler]: + client_error: type[Exception] = ClientConnectorError, + ) -> tuple[asyncio.BaseTransport, ResponseHandler]: """Wrap the raw TCP transport with TLS.""" tls_proto = self._factory() # Create a brand new proto for TLS sslcontext = self._get_ssl_context(req) @@ -1485,14 +1466,14 @@ async def _start_tls_connection( return tls_transport, tls_proto def _convert_hosts_to_addr_infos( - self, hosts: List[ResolveResult] - ) -> List[AddrInfoType]: + self, hosts: list[ResolveResult] + ) -> list[AddrInfoType]: """Converts the list of hosts to a list of addr_infos. The list of hosts is the result of a DNS lookup. The list of addr_infos is the result of a call to `socket.getaddrinfo()`. """ - addr_infos: List[AddrInfoType] = [] + addr_infos: list[AddrInfoType] = [] for hinfo in hosts: host = hinfo["host"] is_ipv6 = ":" in host @@ -1508,11 +1489,11 @@ def _convert_hosts_to_addr_infos( async def _create_direct_connection( self, req: ClientRequest, - traces: List["Trace"], + traces: list["Trace"], timeout: "ClientTimeout", *, - client_error: Type[Exception] = ClientConnectorError, - ) -> Tuple[asyncio.Transport, ResponseHandler]: + client_error: type[Exception] = ClientConnectorError, + ) -> tuple[asyncio.Transport, ResponseHandler]: sslcontext = self._get_ssl_context(req) fingerprint = self._get_fingerprint(req) @@ -1537,7 +1518,7 @@ async def _create_direct_connection( # it is problem of resolving proxy ip itself raise ClientConnectorDNSError(req.connection_key, exc) from exc - last_exc: Optional[Exception] = None + last_exc: Exception | None = None addr_infos = self._convert_hosts_to_addr_infos(hosts) while addr_infos: # Strip trailing dots, certificates contain FQDN without dots. 
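`_DNSCacheTable` (above) stores `(cycle(addrs), len(addrs))` per key; `next_addrs()` slices out one full rotation and then consumes a single extra element, so each lookup starts one address further along — a cheap round-robin. Reproducing just that rotation trick:

```python
from itertools import cycle, islice

addrs = ["10.0.0.1", "10.0.0.2", "10.0.0.3"]
rr, length = cycle(addrs), len(addrs)

def next_addrs() -> list[str]:
    out = list(islice(rr, length))
    next(rr)  # shift the cycle so the next call starts one address later
    return out

assert next_addrs() == ["10.0.0.1", "10.0.0.2", "10.0.0.3"]
assert next_addrs() == ["10.0.0.2", "10.0.0.3", "10.0.0.1"]
```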
@@ -1581,12 +1562,12 @@ async def _create_direct_connection( raise last_exc async def _create_proxy_connection( - self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" - ) -> Tuple[asyncio.BaseTransport, ResponseHandler]: + self, req: ClientRequest, traces: list["Trace"], timeout: "ClientTimeout" + ) -> tuple[asyncio.BaseTransport, ResponseHandler]: self._fail_on_no_start_tls(req) runtime_has_start_tls = self._loop_supports_start_tls() - headers: Dict[str, str] = {} + headers: dict[str, str] = {} if req.proxy_headers is not None: headers = req.proxy_headers # type: ignore[assignment] headers[hdrs.HOST] = req.headers[hdrs.HOST] @@ -1725,10 +1706,10 @@ def __init__( self, path: str, force_close: bool = False, - keepalive_timeout: Union[object, float, None] = sentinel, + keepalive_timeout: object | float | None = sentinel, limit: int = 100, limit_per_host: int = 0, - loop: Optional[asyncio.AbstractEventLoop] = None, + loop: asyncio.AbstractEventLoop | None = None, ) -> None: super().__init__( force_close=force_close, @@ -1745,7 +1726,7 @@ def path(self) -> str: return self._path async def _create_connection( - self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" + self, req: ClientRequest, traces: list["Trace"], timeout: "ClientTimeout" ) -> ResponseHandler: try: async with ceil_timeout( @@ -1783,10 +1764,10 @@ def __init__( self, path: str, force_close: bool = False, - keepalive_timeout: Union[object, float, None] = sentinel, + keepalive_timeout: object | float | None = sentinel, limit: int = 100, limit_per_host: int = 0, - loop: Optional[asyncio.AbstractEventLoop] = None, + loop: asyncio.AbstractEventLoop | None = None, ) -> None: super().__init__( force_close=force_close, @@ -1810,7 +1791,7 @@ def path(self) -> str: return self._path async def _create_connection( - self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" + self, req: ClientRequest, traces: list["Trace"], timeout: "ClientTimeout" ) -> ResponseHandler: try: async with ceil_timeout( diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py index 193648d4309..016fae94d20 100644 --- a/aiohttp/cookiejar.py +++ b/aiohttp/cookiejar.py @@ -11,19 +11,9 @@ import time import warnings from collections import defaultdict -from collections.abc import Mapping +from collections.abc import Iterable, Iterator, Mapping from http.cookies import BaseCookie, Morsel, SimpleCookie -from typing import ( - DefaultDict, - Dict, - Iterable, - Iterator, - List, - Optional, - Set, - Tuple, - Union, -) +from typing import Union from yarl import URL @@ -74,10 +64,9 @@ class CookieJar(AbstractCookieJar): ) try: calendar.timegm(time.gmtime(MAX_TIME)) - except (OSError, ValueError): + except OSError: # Hit the maximum representable time on Windows # https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/localtime-localtime32-localtime64 - # Throws ValueError on PyPy 3.9, OSError elsewhere MAX_TIME = calendar.timegm((3000, 12, 31, 23, 59, 59, -1, -1, -1)) except OverflowError: # #4515: datetime.max may not be representable on 32-bit platforms @@ -90,17 +79,17 @@ def __init__( *, unsafe: bool = False, quote_cookie: bool = True, - treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, + treat_as_secure_origin: StrOrURL | list[StrOrURL] | None = None, + loop: asyncio.AbstractEventLoop | None = None, ) -> None: super().__init__(loop=loop) - self._cookies: DefaultDict[Tuple[str, str], SimpleCookie] = defaultdict( + self._cookies: 
defaultdict[tuple[str, str], SimpleCookie] = defaultdict( SimpleCookie ) - self._morsel_cache: DefaultDict[Tuple[str, str], Dict[str, Morsel[str]]] = ( + self._morsel_cache: defaultdict[tuple[str, str], dict[str, Morsel[str]]] = ( defaultdict(dict) ) - self._host_only_cookies: Set[Tuple[str, str]] = set() + self._host_only_cookies: set[tuple[str, str]] = set() self._unsafe = unsafe self._quote_cookie = quote_cookie if treat_as_secure_origin is None: @@ -115,8 +104,8 @@ def __init__( for url in treat_as_secure_origin ] self._treat_as_secure_origin = treat_as_secure_origin - self._expire_heap: List[Tuple[float, Tuple[str, str, str]]] = [] - self._expirations: Dict[Tuple[str, str, str], float] = {} + self._expire_heap: list[tuple[float, tuple[str, str, str]]] = [] + self._expirations: dict[tuple[str, str, str], float] = {} @property def quote_cookie(self) -> bool: @@ -132,7 +121,7 @@ def load(self, file_path: PathLike) -> None: with file_path.open(mode="rb") as f: self._cookies = pickle.load(f) - def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: + def clear(self, predicate: ClearCookiePredicate | None = None) -> None: if predicate is None: self._expire_heap.clear() self._cookies.clear() @@ -197,7 +186,7 @@ def _do_expiration(self) -> None: heapq.heapify(self._expire_heap) now = time.time() - to_del: List[Tuple[str, str, str]] = [] + to_del: list[tuple[str, str, str]] = [] # Find any expired cookies and add them to the to-delete list while self._expire_heap: when, cookie_key = self._expire_heap[0] @@ -214,7 +203,7 @@ def _do_expiration(self) -> None: if to_del: self._delete_cookies(to_del) - def _delete_cookies(self, to_del: List[Tuple[str, str, str]]) -> None: + def _delete_cookies(self, to_del: list[tuple[str, str, str]]) -> None: for domain, path, name in to_del: self._host_only_cookies.discard((domain, name)) self._cookies[(domain, path)].pop(name, None) @@ -424,7 +413,7 @@ def _is_domain_match(domain: str, hostname: str) -> bool: return not is_ip_address(hostname) @classmethod - def _parse_date(cls, date_str: str) -> Optional[int]: + def _parse_date(cls, date_str: str) -> int | None: """Implements date string parsing adhering to RFC 6265.""" if not date_str: return None @@ -495,7 +484,7 @@ class DummyCookieJar(AbstractCookieJar): """ - def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: + def __init__(self, *, loop: asyncio.AbstractEventLoop | None = None) -> None: super().__init__(loop=loop) def __iter__(self) -> "Iterator[Morsel[str]]": @@ -509,7 +498,7 @@ def __len__(self) -> int: def quote_cookie(self) -> bool: return True - def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None: + def clear(self, predicate: ClearCookiePredicate | None = None) -> None: pass def clear_domain(self, domain: str) -> None: diff --git a/aiohttp/formdata.py b/aiohttp/formdata.py index a5a4f603e19..869a966a712 100644 --- a/aiohttp/formdata.py +++ b/aiohttp/formdata.py @@ -1,6 +1,7 @@ import io import warnings -from typing import Any, Iterable, List, Optional +from collections.abc import Iterable +from typing import Any from urllib.parse import urlencode from multidict import MultiDict, MultiDictProxy @@ -22,12 +23,12 @@ def __init__( self, fields: Iterable[Any] = (), quote_fields: bool = True, - charset: Optional[str] = None, + charset: str | None = None, *, default_to_multipart: bool = False, ) -> None: self._writer = multipart.MultipartWriter("form-data") - self._fields: List[Any] = [] + self._fields: list[Any] = [] self._is_multipart 
= default_to_multipart self._quote_fields = quote_fields self._charset = charset @@ -47,9 +48,9 @@ def add_field( name: str, value: Any, *, - content_type: Optional[str] = None, - filename: Optional[str] = None, - content_transfer_encoding: Optional[str] = None, + content_type: str | None = None, + filename: str | None = None, + content_transfer_encoding: str | None = None, ) -> None: if isinstance(value, io.IOBase): @@ -116,7 +117,7 @@ def add_fields(self, *fields: Any) -> None: raise TypeError( "Only io.IOBase, multidict and (name, file) " "pairs allowed, use .add_field() for passing " - "more complex parameters, got {!r}".format(rec) + f"more complex parameters, got {rec!r}" ) def _gen_form_urlencoded(self) -> payload.BytesPayload: diff --git a/aiohttp/hdrs.py b/aiohttp/hdrs.py index c8d6b35f33a..b64b62ee7f2 100644 --- a/aiohttp/hdrs.py +++ b/aiohttp/hdrs.py @@ -3,7 +3,7 @@ # After changing the file content call ./tools/gen.py # to regenerate the headers parser import itertools -from typing import Final, Set +from typing import Final from multidict import istr @@ -18,7 +18,7 @@ METH_PUT: Final[str] = "PUT" METH_TRACE: Final[str] = "TRACE" -METH_ALL: Final[Set[str]] = { +METH_ALL: Final[set[str]] = { METH_CONNECT, METH_HEAD, METH_GET, diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index ace4f0e9b53..8d008d1b224 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -16,6 +16,7 @@ import time import weakref from collections import namedtuple +from collections.abc import Callable, Generator, Iterable, Iterator, Mapping from contextlib import suppress from email.parser import HeaderParser from email.utils import parsedate @@ -24,21 +25,11 @@ from types import MappingProxyType, TracebackType from typing import ( Any, - Callable, ContextManager, - Dict, - Generator, Generic, - Iterable, - Iterator, - List, - Mapping, Optional, Protocol, - Tuple, - Type, TypeVar, - Union, get_args, overload, ) @@ -63,7 +54,6 @@ IS_MACOS = platform.system() == "Darwin" IS_WINDOWS = platform.system() == "Windows" -PY_310 = sys.version_info >= (3, 10) PY_311 = sys.version_info >= (3, 11) @@ -182,7 +172,7 @@ def encode(self) -> str: return "Basic %s" % base64.b64encode(creds).decode(self.encoding) -def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: +def strip_auth_from_url(url: URL) -> tuple[URL, BasicAuth | None]: """Remove user and password from URL if present and return BasicAuth object.""" # Check raw_user and raw_password first as yarl is likely # to already have these values parsed from the netloc in the cache. @@ -191,7 +181,7 @@ def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: return url.with_user(None), BasicAuth(url.user or "", url.password or "") -def netrc_from_env() -> Optional[netrc.netrc]: +def netrc_from_env() -> netrc.netrc | None: """Load netrc from file. Attempt to load it from the path specified by the env-var @@ -237,10 +227,10 @@ def netrc_from_env() -> Optional[netrc.netrc]: @attr.s(auto_attribs=True, frozen=True, slots=True) class ProxyInfo: proxy: URL - proxy_auth: Optional[BasicAuth] + proxy_auth: BasicAuth | None -def basicauth_from_netrc(netrc_obj: Optional[netrc.netrc], host: str) -> BasicAuth: +def basicauth_from_netrc(netrc_obj: netrc.netrc | None, host: str) -> BasicAuth: """ Return :py:class:`~aiohttp.BasicAuth` credentials for ``host`` from ``netrc_obj``. 
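The rewrite pattern running through these hunks swaps typing.Optional/typing.Union for PEP 604 `X | Y` unions and typing.List/Dict/Tuple/Set for the PEP 585 builtin generics, which is safe at runtime once the minimum supported Python is 3.10. A minimal sketch of the equivalence (the helper below is hypothetical, not aiohttp API):

    # Old spelling, as removed throughout this patch:
    #   from typing import Optional, Tuple
    #   def split_auth(netloc: str) -> Tuple[str, Optional[str]]: ...
    # New spelling, valid at runtime on Python 3.10+:
    def split_auth(netloc: str) -> tuple[str, str | None]:
        """Hypothetical helper: split 'user@host' into (host, user-or-None)."""
        user, sep, host = netloc.rpartition("@")
        return host, (user if sep else None)

    assert split_auth("alice@example.com") == ("example.com", "alice")
    assert split_auth("example.com") == ("example.com", None)
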
@@ -269,7 +259,7 @@ def basicauth_from_netrc(netrc_obj: Optional[netrc.netrc], host: str) -> BasicAu return BasicAuth(username, password) -def proxies_from_env() -> Dict[str, ProxyInfo]: +def proxies_from_env() -> dict[str, ProxyInfo]: proxy_urls = { k: URL(v) for k, v in getproxies().items() @@ -295,7 +285,7 @@ def proxies_from_env() -> Dict[str, ProxyInfo]: return ret -def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]: +def get_env_proxy_for_url(url: URL) -> tuple[URL, BasicAuth | None]: """Get a permitted proxy for the given URL from the env.""" if url.host is not None and proxy_bypass(url.host): raise LookupError(f"Proxying is disallowed for `{url.host!r}`") @@ -358,7 +348,7 @@ def parse_mimetype(mimetype: str) -> MimeType: @functools.lru_cache(maxsize=56) -def parse_content_type(raw: str) -> Tuple[str, MappingProxyType[str, str]]: +def parse_content_type(raw: str) -> tuple[str, MappingProxyType[str, str]]: """Parse Content-Type header. Returns a tuple of the parsed content type and a @@ -371,7 +361,7 @@ def parse_content_type(raw: str) -> Tuple[str, MappingProxyType[str, str]]: return content_type, MappingProxyType(content_dict) -def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]: +def guess_filename(obj: Any, default: str | None = None) -> str | None: name = getattr(obj, "name", None) if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">": return Path(name).name @@ -446,7 +436,7 @@ def content_disposition_header( return value -def is_ip_address(host: Optional[str]) -> bool: +def is_ip_address(host: str | None) -> bool: """Check if host looks like an IP Address. This check is only meant as a heuristic to ensure that @@ -459,7 +449,7 @@ def is_ip_address(host: Optional[str]) -> bool: return ":" in host or host.replace(".", "").isdigit() -_cached_current_datetime: Optional[int] = None +_cached_current_datetime: int | None = None _cached_formatted_datetime = "" @@ -503,7 +493,7 @@ def rfc822_formatted_time() -> str: return _cached_formatted_datetime -def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None: +def _weakref_handle(info: "tuple[weakref.ref[object], str]") -> None: ref, name = info ob = ref() if ob is not None: @@ -517,7 +507,7 @@ def weakref_handle( timeout: float, loop: asyncio.AbstractEventLoop, timeout_ceil_threshold: float = 5, -) -> Optional[asyncio.TimerHandle]: +) -> asyncio.TimerHandle | None: if timeout is not None and timeout > 0: when = loop.time() + timeout if timeout >= timeout_ceil_threshold: @@ -532,7 +522,7 @@ def call_later( timeout: float, loop: asyncio.AbstractEventLoop, timeout_ceil_threshold: float = 5, -) -> Optional[asyncio.TimerHandle]: +) -> asyncio.TimerHandle | None: if timeout is None or timeout <= 0: return None now = loop.time() @@ -560,14 +550,14 @@ class TimeoutHandle: def __init__( self, loop: asyncio.AbstractEventLoop, - timeout: Optional[float], + timeout: float | None, ceil_threshold: float = 5, ) -> None: self._timeout = timeout self._loop = loop self._ceil_threshold = ceil_threshold - self._callbacks: List[ - Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]] + self._callbacks: list[ + tuple[Callable[..., None], tuple[Any, ...], dict[str, Any]] ] = [] def register( @@ -578,7 +568,7 @@ def register( def close(self) -> None: self._callbacks.clear() - def start(self) -> Optional[asyncio.TimerHandle]: + def start(self) -> asyncio.TimerHandle | None: timeout = self._timeout if timeout is not None and timeout > 0: when = self._loop.time() + timeout @@ -621,9 
+611,9 @@ def __enter__(self) -> BaseTimerContext: def __exit__( self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, ) -> None: return @@ -635,7 +625,7 @@ class TimerContext(BaseTimerContext): def __init__(self, loop: asyncio.AbstractEventLoop) -> None: self._loop = loop - self._tasks: List[asyncio.Task[Any]] = [] + self._tasks: list[asyncio.Task[Any]] = [] self._cancelled = False self._cancelling = 0 @@ -663,11 +653,11 @@ def __enter__(self) -> BaseTimerContext: def __exit__( self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> Optional[bool]: - enter_task: Optional[asyncio.Task[Any]] = None + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool | None: + enter_task: asyncio.Task[Any] | None = None if self._tasks: enter_task = self._tasks.pop() @@ -694,7 +684,7 @@ def timeout(self) -> None: def ceil_timeout( - delay: Optional[float], ceil_threshold: float = 5 + delay: float | None, ceil_threshold: float = 5 ) -> async_timeout.Timeout: if delay is None or delay <= 0: return async_timeout.timeout(None) @@ -713,11 +703,11 @@ class HeadersMixin: ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"]) _headers: MultiMapping[str] - _content_type: Optional[str] = None - _content_dict: Optional[Dict[str, str]] = None - _stored_content_type: Union[str, None, _SENTINEL] = sentinel + _content_type: str | None = None + _content_dict: dict[str, str] | None = None + _stored_content_type: str | None | _SENTINEL = sentinel - def _parse_content_type(self, raw: Optional[str]) -> None: + def _parse_content_type(self, raw: str | None) -> None: self._stored_content_type = raw if raw is None: # default value according to RFC 2616 @@ -739,7 +729,7 @@ def content_type(self) -> str: return self._content_type @property - def charset(self) -> Optional[str]: + def charset(self) -> str | None: """The value of charset part for Content-Type HTTP header.""" raw = self._headers.get(hdrs.CONTENT_TYPE) if self._stored_content_type != raw: @@ -748,7 +738,7 @@ def charset(self) -> Optional[str]: return self._content_dict.get("charset") @property - def content_length(self) -> Optional[int]: + def content_length(self) -> int | None: """The value of Content-Length HTTP header.""" content_length = self._headers.get(hdrs.CONTENT_LENGTH) return None if content_length is None else int(content_length) @@ -802,9 +792,9 @@ class AppKey(Generic[_T]): # This may be set by Python when instantiating with a generic type. We need to # support this, in order to support types that are not concrete classes, # like Iterable, which can't be passed as the second parameter to __init__. - __orig_class__: Type[object] + __orig_class__: type[object] - def __init__(self, name: str, t: Optional[Type[_T]] = None): + def __init__(self, name: str, t: type[_T] | None = None): # Prefix with module name to help deduplicate key names. 
frame = inspect.currentframe() while frame: @@ -840,16 +830,15 @@ def __repr__(self) -> str: return f"" -class ChainMapProxy(Mapping[Union[str, AppKey[Any]], Any]): +class ChainMapProxy(Mapping[str | AppKey[Any], Any]): __slots__ = ("_maps",) - def __init__(self, maps: Iterable[Mapping[Union[str, AppKey[Any]], Any]]) -> None: + def __init__(self, maps: Iterable[Mapping[str | AppKey[Any], Any]]) -> None: self._maps = tuple(maps) def __init_subclass__(cls) -> None: raise TypeError( - "Inheritance class {} from ChainMapProxy " - "is forbidden".format(cls.__name__) + f"Inheritance class {cls.__name__} from ChainMapProxy " "is forbidden" ) @overload # type: ignore[override] @@ -858,7 +847,7 @@ def __getitem__(self, key: AppKey[_T]) -> _T: ... @overload def __getitem__(self, key: str) -> Any: ... - def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any: + def __getitem__(self, key: str | AppKey[_T]) -> Any: for mapping in self._maps: try: return mapping[key] @@ -867,15 +856,15 @@ def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any: raise KeyError(key) @overload # type: ignore[override] - def get(self, key: AppKey[_T], default: _S) -> Union[_T, _S]: ... + def get(self, key: AppKey[_T], default: _S) -> _T | _S: ... @overload - def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: ... + def get(self, key: AppKey[_T], default: None = ...) -> _T | None: ... @overload def get(self, key: str, default: Any = ...) -> Any: ... - def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any: + def get(self, key: str | AppKey[_T], default: Any = None) -> Any: try: return self[key] except KeyError: @@ -885,8 +874,8 @@ def __len__(self) -> int: # reuses stored hash values if possible return len(set().union(*self._maps)) - def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]: - d: Dict[Union[str, AppKey[Any]], Any] = {} + def __iter__(self) -> Iterator[str | AppKey[Any]]: + d: dict[str | AppKey[Any], Any] = {} for mapping in reversed(self._maps): # reuses stored hash values if possible d.update(mapping) @@ -926,7 +915,7 @@ def validate_etag_value(value: str) -> None: ) -def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]: +def parse_http_date(date_str: str | None) -> datetime.datetime | None: """Process a date string, return a datetime object""" if date_str is not None: timetuple = parsedate(date_str) diff --git a/aiohttp/http.py b/aiohttp/http.py index a1feae2d9b8..ed7158d5f51 100644 --- a/aiohttp/http.py +++ b/aiohttp/http.py @@ -1,6 +1,6 @@ import sys +from collections.abc import Mapping from http import HTTPStatus -from typing import Mapping, Tuple from . 
import __version__ from .http_exceptions import HttpProcessingError as HttpProcessingError @@ -63,10 +63,10 @@ ) -SERVER_SOFTWARE: str = "Python/{0[0]}.{0[1]} aiohttp/{1}".format( - sys.version_info, __version__ +SERVER_SOFTWARE: str = ( + f"Python/{sys.version_info[0]}.{sys.version_info[1]} aiohttp/{__version__}" ) -RESPONSES: Mapping[int, Tuple[str, str]] = { +RESPONSES: Mapping[int, tuple[str, str]] = { v: (v.phrase, v.description) for v in HTTPStatus.__members__.values() } diff --git a/aiohttp/http_exceptions.py b/aiohttp/http_exceptions.py index 773830211e6..f8ea08162c8 100644 --- a/aiohttp/http_exceptions.py +++ b/aiohttp/http_exceptions.py @@ -1,7 +1,6 @@ """Low-level http related exceptions.""" from textwrap import indent -from typing import Optional, Union from .typedefs import _CIMultiDict @@ -25,9 +24,9 @@ class HttpProcessingError(Exception): def __init__( self, *, - code: Optional[int] = None, + code: int | None = None, message: str = "", - headers: Optional[_CIMultiDict] = None, + headers: _CIMultiDict | None = None, ) -> None: if code is not None: self.code = code @@ -47,7 +46,7 @@ class BadHttpMessage(HttpProcessingError): code = 400 message = "Bad Request" - def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None: + def __init__(self, message: str, *, headers: _CIMultiDict | None = None) -> None: super().__init__(message=message, headers=headers) self.args = (message,) @@ -85,7 +84,7 @@ def __init__( class InvalidHeader(BadHttpMessage): - def __init__(self, hdr: Union[bytes, str]) -> None: + def __init__(self, hdr: bytes | str) -> None: hdr_s = hdr.decode(errors="backslashreplace") if isinstance(hdr, bytes) else hdr super().__init__(f"Invalid HTTP header: {hdr!r}") self.hdr = hdr_s @@ -93,7 +92,7 @@ def __init__(self, hdr: Union[bytes, str]) -> None: class BadStatusLine(BadHttpMessage): - def __init__(self, line: str = "", error: Optional[str] = None) -> None: + def __init__(self, line: str = "", error: str | None = None) -> None: if not isinstance(line, str): line = repr(line) super().__init__(error or f"Bad status line {line!r}") @@ -104,7 +103,7 @@ def __init__(self, line: str = "", error: Optional[str] = None) -> None: class BadHttpMethod(BadStatusLine): """Invalid HTTP method in status line.""" - def __init__(self, line: str = "", error: Optional[str] = None) -> None: + def __init__(self, line: str = "", error: str | None = None) -> None: super().__init__(line, error or f"Bad HTTP method in status line {line!r}") diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py index f862088850f..d1126b6ea9e 100644 --- a/aiohttp/http_parser.py +++ b/aiohttp/http_parser.py @@ -4,22 +4,8 @@ import string from contextlib import suppress from enum import IntEnum -from typing import ( - Any, - ClassVar, - Final, - Generic, - List, - Literal, - NamedTuple, - Optional, - Pattern, - Set, - Tuple, - Type, - TypeVar, - Union, -) +from re import Pattern +from typing import Any, ClassVar, Final, Generic, Literal, NamedTuple, TypeVar from multidict import CIMultiDict, CIMultiDictProxy, istr from yarl import URL @@ -68,7 +54,7 @@ _SEP = Literal[b"\r\n", b"\n"] -ASCIISET: Final[Set[str]] = set(string.printable) +ASCIISET: Final[set[str]] = set(string.printable) # See https://www.rfc-editor.org/rfc/rfc9110.html#name-overview # and https://www.rfc-editor.org/rfc/rfc9110.html#name-tokens @@ -91,7 +77,7 @@ class RawRequestMessage(NamedTuple): headers: "CIMultiDictProxy[str]" raw_headers: RawHeaders should_close: bool - compression: Optional[str] + compression: str 
| None upgrade: bool chunked: bool url: URL @@ -104,7 +90,7 @@ class RawResponseMessage(NamedTuple): headers: CIMultiDictProxy[str] raw_headers: RawHeaders should_close: bool - compression: Optional[str] + compression: str | None upgrade: bool chunked: bool @@ -142,8 +128,8 @@ def __init__( self._lax = lax def parse_headers( - self, lines: List[bytes] - ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]: + self, lines: list[bytes] + ) -> tuple["CIMultiDictProxy[str]", RawHeaders]: headers: CIMultiDict[str] = CIMultiDict() # note: "raw" does not mean inclusion of OWS before/after the field value raw_headers = [] @@ -246,16 +232,16 @@ class HttpParser(abc.ABC, Generic[_MsgT]): def __init__( self, - protocol: Optional[BaseProtocol] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, + protocol: BaseProtocol | None = None, + loop: asyncio.AbstractEventLoop | None = None, limit: int = 2**16, max_line_size: int = 8190, max_headers: int = 32768, max_field_size: int = 8190, - timer: Optional[BaseTimerContext] = None, - code: Optional[int] = None, - method: Optional[str] = None, - payload_exception: Optional[Type[BaseException]] = None, + timer: BaseTimerContext | None = None, + code: int | None = None, + method: str | None = None, + payload_exception: type[BaseException] | None = None, response_with_body: bool = True, read_until_eof: bool = False, auto_decompress: bool = True, @@ -272,11 +258,11 @@ def __init__( self.response_with_body = response_with_body self.read_until_eof = read_until_eof - self._lines: List[bytes] = [] + self._lines: list[bytes] = [] self._tail = b"" self._upgraded = False self._payload = None - self._payload_parser: Optional[HttpPayloadParser] = None + self._payload_parser: HttpPayloadParser | None = None self._auto_decompress = auto_decompress self._limit = limit self._headers_parser = HeadersParser( @@ -284,12 +270,12 @@ def __init__( ) @abc.abstractmethod - def parse_message(self, lines: List[bytes]) -> _MsgT: ... + def parse_message(self, lines: list[bytes]) -> _MsgT: ... @abc.abstractmethod def _is_chunked_te(self, te: str) -> bool: ... - def feed_eof(self) -> Optional[_MsgT]: + def feed_eof(self) -> _MsgT | None: if self._payload_parser is not None: self._payload_parser.feed_eof() self._payload_parser = None @@ -313,7 +299,7 @@ def feed_data( CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH, METH_CONNECT: str = hdrs.METH_CONNECT, SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1, - ) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]: + ) -> tuple[list[tuple[_MsgT, StreamReader]], bool, bytes]: messages = [] @@ -354,7 +340,7 @@ def feed_data( finally: self._lines.clear() - def get_content_length() -> Optional[int]: + def get_content_length() -> int | None: # payload length length_hdr = msg.headers.get(CONTENT_LENGTH) if length_hdr is None: @@ -503,9 +489,9 @@ def get_content_length() -> Optional[int]: return messages, self._upgraded, data def parse_headers( - self, lines: List[bytes] - ) -> Tuple[ - "CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool + self, lines: list[bytes] + ) -> tuple[ + "CIMultiDictProxy[str]", RawHeaders, bool | None, str | None, bool, bool ]: """Parses RFC 5322 headers from a stream. @@ -584,7 +570,7 @@ class HttpRequestParser(HttpParser[RawRequestMessage]): Returns RawRequestMessage. 
""" - def parse_message(self, lines: List[bytes]) -> RawRequestMessage: + def parse_message(self, lines: list[bytes]) -> RawRequestMessage: # request line line = lines[0].decode("utf-8", "surrogateescape") try: @@ -689,15 +675,15 @@ class HttpResponseParser(HttpParser[RawResponseMessage]): def feed_data( self, data: bytes, - SEP: Optional[_SEP] = None, + SEP: _SEP | None = None, *args: Any, **kwargs: Any, - ) -> Tuple[List[Tuple[RawResponseMessage, StreamReader]], bool, bytes]: + ) -> tuple[list[tuple[RawResponseMessage, StreamReader]], bool, bytes]: if SEP is None: SEP = b"\r\n" if DEBUG else b"\n" return super().feed_data(data, SEP, *args, **kwargs) - def parse_message(self, lines: List[bytes]) -> RawResponseMessage: + def parse_message(self, lines: list[bytes]) -> RawResponseMessage: line = lines[0].decode("utf-8", "surrogateescape") try: version, status = line.split(maxsplit=1) @@ -769,11 +755,11 @@ class HttpPayloadParser: def __init__( self, payload: StreamReader, - length: Optional[int] = None, + length: int | None = None, chunked: bool = False, - compression: Optional[str] = None, - code: Optional[int] = None, - method: Optional[str] = None, + compression: str | None = None, + code: int | None = None, + method: str | None = None, response_with_body: bool = True, auto_decompress: bool = True, lax: bool = False, @@ -793,7 +779,7 @@ def __init__( # payload decompression wrapper if response_with_body and compression and self._auto_decompress: - real_payload: Union[StreamReader, DeflateBuffer] = DeflateBuffer( + real_payload: StreamReader | DeflateBuffer = DeflateBuffer( payload, compression ) else: @@ -830,7 +816,7 @@ def feed_eof(self) -> None: def feed_data( self, chunk: bytes, SEP: _SEP = b"\r\n", CHUNK_EXT: bytes = b";" - ) -> Tuple[bool, bytes]: + ) -> tuple[bool, bytes]: # Read specified amount of bytes if self._type == ParseState.PARSE_LENGTH: required = self._length @@ -961,14 +947,14 @@ class DeflateBuffer: decompressor: Any - def __init__(self, out: StreamReader, encoding: Optional[str]) -> None: + def __init__(self, out: StreamReader, encoding: str | None) -> None: self.out = out self.size = 0 out.total_compressed_bytes = self.size self.encoding = encoding self._started_decoding = False - self.decompressor: Union[BrotliDecompressor, ZLibDecompressor, ZSTDDecompressor] + self.decompressor: BrotliDecompressor | ZLibDecompressor | ZSTDDecompressor if encoding == "br": if not HAS_BROTLI: # pragma: no cover raise ContentEncodingError( diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py index a140b218b25..8393d4a7c5e 100644 --- a/aiohttp/http_writer.py +++ b/aiohttp/http_writer.py @@ -51,10 +51,10 @@ class HttpVersion(NamedTuple): class StreamWriter(AbstractStreamWriter): - length: Optional[int] = None + length: int | None = None chunked: bool = False _eof: bool = False - _compress: Optional[ZLibCompressor] = None + _compress: ZLibCompressor | None = None def __init__( self, @@ -67,11 +67,11 @@ def __init__( self.loop = loop self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent self._on_headers_sent: _T_OnHeadersSent = on_headers_sent - self._headers_buf: Optional[bytes] = None + self._headers_buf: bytes | None = None self._headers_written: bool = False @property - def transport(self) -> Optional[asyncio.Transport]: + def transport(self) -> asyncio.Transport | None: return self._protocol.transport @property @@ -82,11 +82,11 @@ def enable_chunking(self) -> None: self.chunked = True def enable_compression( - self, encoding: str = "deflate", strategy: Optional[int] = None + 
self, encoding: str = "deflate", strategy: int | None = None ) -> None: self._compress = ZLibCompressor(encoding=encoding, strategy=strategy) - def _write(self, chunk: Union[bytes, bytearray, memoryview]) -> None: + def _write(self, chunk: bytes | bytearray | memoryview) -> None: size = len(chunk) self.buffer_size += size self.output_size += size @@ -154,7 +154,7 @@ def _send_headers_with_payload( async def write( self, - chunk: Union[bytes, bytearray, memoryview], + chunk: bytes | bytearray | memoryview, *, drain: bool = True, LIMIT: int = 0x10000, @@ -269,7 +269,7 @@ async def write_eof(self, chunk: bytes = b"") -> None: # Handle body/compression if self._compress: - chunks: List[bytes] = [] + chunks: list[bytes] = [] chunks_len = 0 if chunk and (compressed_chunk := await self._compress.compress(chunk)): chunks_len = len(compressed_chunk) diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py index 3464b1c2307..e643976ce4c 100644 --- a/aiohttp/multipart.py +++ b/aiohttp/multipart.py @@ -6,21 +6,9 @@ import uuid import warnings from collections import deque -from collections.abc import Mapping, Sequence +from collections.abc import Iterator, Mapping, Sequence from types import TracebackType -from typing import ( - TYPE_CHECKING, - Any, - Deque, - Dict, - Iterator, - List, - Optional, - Tuple, - Type, - Union, - cast, -) +from typing import TYPE_CHECKING, Any, Union, cast from urllib.parse import parse_qsl, unquote, urlencode from multidict import CIMultiDict, CIMultiDictProxy @@ -78,8 +66,8 @@ class BadContentDispositionParam(RuntimeWarning): def parse_content_disposition( - header: Optional[str], -) -> Tuple[Optional[str], Dict[str, str]]: + header: str | None, +) -> tuple[str | None, dict[str, str]]: def is_token(string: str) -> bool: return bool(string) and TOKEN >= set(string) @@ -110,7 +98,7 @@ def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str: warnings.warn(BadContentDispositionHeader(header)) return None, {} - params: Dict[str, str] = {} + params: dict[str, str] = {} while parts: item = parts.pop(0) @@ -182,7 +170,7 @@ def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str: def content_disposition_filename( params: Mapping[str, str], name: str = "filename" -) -> Optional[str]: +) -> str | None: name_suf = "%s*" % name if not params: return None @@ -245,7 +233,7 @@ def at_eof(self) -> bool: async def next( self, - ) -> Optional[Union["MultipartReader", "BodyPartReader"]]: + ) -> Union["MultipartReader", "BodyPartReader"] | None: """Emits next multipart reader object.""" item = await self.stream.next() if self.stream.at_eof(): @@ -272,7 +260,7 @@ def __init__( content: StreamReader, *, subtype: str = "mixed", - default_charset: Optional[str] = None, + default_charset: str | None = None, ) -> None: self.headers = headers self._boundary = boundary @@ -285,10 +273,10 @@ def __init__( length = None if self._is_form_data else self.headers.get(CONTENT_LENGTH, None) self._length = int(length) if length is not None else None self._read_bytes = 0 - self._unread: Deque[bytes] = deque() - self._prev_chunk: Optional[bytes] = None + self._unread: deque[bytes] = deque() + self._prev_chunk: bytes | None = None self._content_eof = 0 - self._cache: Dict[str, Any] = {} + self._cache: dict[str, Any] = {} def __aiter__(self: Self) -> Self: return self @@ -299,7 +287,7 @@ async def __anext__(self) -> bytes: raise StopAsyncIteration return part - async def next(self) -> Optional[bytes]: + async def next(self) -> bytes | None: item = await self.read() if 
not item: return None @@ -458,7 +446,7 @@ async def release(self) -> None: while not self._at_eof: await self.read_chunk(self.chunk_size) - async def text(self, *, encoding: Optional[str] = None) -> str: + async def text(self, *, encoding: str | None = None) -> str: """Like read(), but assumes that body part contains text data.""" data = await self.read(decode=True) # see https://www.w3.org/TR/html5/forms.html#multipart/form-data-encoding-algorithm @@ -466,15 +454,15 @@ async def text(self, *, encoding: Optional[str] = None) -> str: encoding = encoding or self.get_charset(default="utf-8") return data.decode(encoding) - async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, Any]]: + async def json(self, *, encoding: str | None = None) -> dict[str, Any] | None: """Like read(), but assumes that body parts contains JSON data.""" data = await self.read(decode=True) if not data: return None encoding = encoding or self.get_charset(default="utf-8") - return cast(Dict[str, Any], json.loads(data.decode(encoding))) + return cast(dict[str, Any], json.loads(data.decode(encoding))) - async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]: + async def form(self, *, encoding: str | None = None) -> list[tuple[str, str]]: """Like read(), but assumes that body parts contain form urlencoded data.""" data = await self.read(decode=True) if not data: @@ -542,7 +530,7 @@ def get_charset(self, default: str) -> str: return mimetype.parameters.get("charset", self._default_charset or default) @reify - def name(self) -> Optional[str]: + def name(self) -> str | None: """Returns name specified in Content-Disposition header. If the header is missing or malformed, returns None. @@ -551,7 +539,7 @@ def name(self) -> Optional[str]: return content_disposition_filename(params, "name") @reify - def filename(self) -> Optional[str]: + def filename(self) -> str | None: """Returns filename specified in Content-Disposition header. Returns None if the header is missing or malformed. @@ -568,7 +556,7 @@ class BodyPartReaderPayload(Payload): def __init__(self, value: BodyPartReader, *args: Any, **kwargs: Any) -> None: super().__init__(value, *args, **kwargs) - params: Dict[str, str] = {} + params: dict[str, str] = {} if value.name is not None: params["name"] = value.name if value.filename is not None: @@ -605,7 +593,7 @@ class MultipartReader: response_wrapper_cls = MultipartResponseWrapper #: Multipart reader class, used to handle multipart/* body parts. #: None points to type(self) - multipart_reader_cls: Optional[Type["MultipartReader"]] = None + multipart_reader_cls: type["MultipartReader"] | None = None #: Body part reader class for non multipart/* content types. 
part_reader_cls = BodyPartReader @@ -620,18 +608,18 @@ def __init__(self, headers: Mapping[str, str], content: StreamReader) -> None: self.headers = headers self._boundary = ("--" + self._get_boundary()).encode() self._content = content - self._default_charset: Optional[str] = None - self._last_part: Optional[Union["MultipartReader", BodyPartReader]] = None + self._default_charset: str | None = None + self._last_part: MultipartReader | BodyPartReader | None = None self._at_eof = False self._at_bof = True - self._unread: List[bytes] = [] + self._unread: list[bytes] = [] def __aiter__(self: Self) -> Self: return self async def __anext__( self, - ) -> Optional[Union["MultipartReader", BodyPartReader]]: + ) -> Union["MultipartReader", BodyPartReader] | None: part = await self.next() if part is None: raise StopAsyncIteration @@ -657,7 +645,7 @@ def at_eof(self) -> bool: async def next( self, - ) -> Optional[Union["MultipartReader", BodyPartReader]]: + ) -> Union["MultipartReader", BodyPartReader] | None: """Emits the next multipart body part.""" # So, if we're at BOF, we need to skip till the boundary. if self._at_eof: @@ -801,7 +789,7 @@ async def _maybe_release_last_part(self) -> None: self._last_part = None -_Part = Tuple[Payload, str, str] +_Part = tuple[Payload, str, str] class MultipartWriter(Payload): @@ -811,7 +799,7 @@ class MultipartWriter(Payload): # _consumed = False (inherited) - Can be encoded multiple times _autoclose = True # No file handles, just collects parts in memory - def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> None: + def __init__(self, subtype: str = "mixed", boundary: str | None = None) -> None: boundary = boundary if boundary is not None else uuid.uuid4().hex # The underlying Payload API demands a str (utf-8), not bytes, # so we need to ensure we don't lose anything during conversion. 
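Note the mixed spelling this file keeps: `Union["MultipartReader", "BodyPartReader"] | None`. Where an annotation is evaluated at runtime, a quoted forward reference cannot be the left operand of `|` (a plain str has no such operator), while a typing.Union object can. A minimal sketch of that constraint, using a hypothetical class name:

    from typing import Union

    class Reader:  # hypothetical stand-in for a forward-referenced class
        pass

    # Fine: typing.Union wraps the forward reference, and the resulting
    # object supports `|`, so appending `| None` works on Python 3.10+.
    ok = Union["Reader", int] | None

    # TypeError if evaluated eagerly: a bare string has no `|` operator.
    try:
        bad = "Reader" | None  # type: ignore[operator]
    except TypeError as exc:
        print("expected:", exc)
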
@@ -826,7 +814,7 @@ def __init__(self, subtype: str = "mixed", boundary: Optional[str] = None) -> No super().__init__(None, content_type=ctype) - self._parts: List[_Part] = [] + self._parts: list[_Part] = [] self._is_form_data = subtype == "form-data" def __enter__(self) -> "MultipartWriter": @@ -834,9 +822,9 @@ def __enter__(self) -> "MultipartWriter": def __exit__( self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, ) -> None: pass @@ -888,7 +876,7 @@ def _boundary_value(self) -> str: def boundary(self) -> str: return self._boundary.decode("ascii") - def append(self, obj: Any, headers: Optional[Mapping[str, str]] = None) -> Payload: + def append(self, obj: Any, headers: Mapping[str, str] | None = None) -> Payload: if headers is None: headers = CIMultiDict() @@ -905,8 +893,8 @@ def append(self, obj: Any, headers: Optional[Mapping[str, str]] = None) -> Paylo def append_payload(self, payload: Payload) -> Payload: """Adds a new body part to multipart writer.""" - encoding: Optional[str] = None - te_encoding: Optional[str] = None + encoding: str | None = None + te_encoding: str | None = None if self._is_form_data: # https://datatracker.ietf.org/doc/html/rfc7578#section-4.7 # https://datatracker.ietf.org/doc/html/rfc7578#section-4.8 @@ -942,7 +930,7 @@ def append_payload(self, payload: Payload) -> Payload: return payload def append_json( - self, obj: Any, headers: Optional[Mapping[str, str]] = None + self, obj: Any, headers: Mapping[str, str] | None = None ) -> Payload: """Helper to append JSON part.""" if headers is None: @@ -952,8 +940,8 @@ def append_json( def append_form( self, - obj: Union[Sequence[Tuple[str, str]], Mapping[str, str]], - headers: Optional[Mapping[str, str]] = None, + obj: Sequence[tuple[str, str]] | Mapping[str, str], + headers: Mapping[str, str] | None = None, ) -> Payload: """Helper to append form urlencoded part.""" assert isinstance(obj, (Sequence, Mapping)) @@ -972,7 +960,7 @@ def append_form( ) @property - def size(self) -> Optional[int]: + def size(self) -> int | None: """Size of the payload.""" total = 0 for part, encoding, te_encoding in self._parts: @@ -1012,7 +1000,7 @@ async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> byt This method is async-safe and calls as_bytes on underlying payloads. 
""" - parts: List[bytes] = [] + parts: list[bytes] = [] # Process each part for part, _e, _te in self._parts: @@ -1090,9 +1078,9 @@ async def close(self) -> None: class MultipartPayloadWriter: def __init__(self, writer: Any) -> None: self._writer = writer - self._encoding: Optional[str] = None - self._compress: Optional[ZLibCompressor] = None - self._encoding_buffer: Optional[bytearray] = None + self._encoding: str | None = None + self._compress: ZLibCompressor | None = None + self._encoding_buffer: bytearray | None = None def enable_encoding(self, encoding: str) -> None: if encoding == "base64": @@ -1102,7 +1090,7 @@ def enable_encoding(self, encoding: str) -> None: self._encoding = "quoted-printable" def enable_compression( - self, encoding: str = "deflate", strategy: Optional[int] = None + self, encoding: str = "deflate", strategy: int | None = None ) -> None: self._compress = ZLibCompressor( encoding=encoding, diff --git a/aiohttp/payload.py b/aiohttp/payload.py index 5b88fa09405..98347d8a885 100644 --- a/aiohttp/payload.py +++ b/aiohttp/payload.py @@ -9,20 +9,7 @@ from abc import ABC, abstractmethod from collections.abc import Iterable from itertools import chain -from typing import ( - IO, - TYPE_CHECKING, - Any, - Dict, - Final, - List, - Optional, - Set, - TextIO, - Tuple, - Type, - Union, -) +from typing import IO, TYPE_CHECKING, Any, Final, TextIO from multidict import CIMultiDict @@ -56,7 +43,7 @@ TOO_LARGE_BYTES_BODY: Final[int] = 2**20 # 1 MB READ_SIZE: Final[int] = 2**16 # 64 KB -_CLOSE_FUTURES: Set[asyncio.Future[None]] = set() +_CLOSE_FUTURES: set[asyncio.Future[None]] = set() class LookupError(Exception): @@ -74,7 +61,7 @@ def get_payload(data: Any, *args: Any, **kwargs: Any) -> "Payload": def register_payload( - factory: Type["Payload"], type: Any, *, order: Order = Order.normal + factory: type["Payload"], type: Any, *, order: Order = Order.normal ) -> None: PAYLOAD_REGISTRY.register(factory, type, order=order) @@ -84,13 +71,13 @@ def __init__(self, type: Any, *, order: Order = Order.normal) -> None: self.type = type self.order = order - def __call__(self, factory: Type["Payload"]) -> Type["Payload"]: + def __call__(self, factory: type["Payload"]) -> type["Payload"]: register_payload(factory, self.type, order=self.order) return factory -PayloadType = Type["Payload"] -_PayloadRegistryItem = Tuple[PayloadType, Any] +PayloadType = type["Payload"] +_PayloadRegistryItem = tuple[PayloadType, Any] class PayloadRegistry: @@ -102,16 +89,16 @@ class PayloadRegistry: __slots__ = ("_first", "_normal", "_last", "_normal_lookup") def __init__(self) -> None: - self._first: List[_PayloadRegistryItem] = [] - self._normal: List[_PayloadRegistryItem] = [] - self._last: List[_PayloadRegistryItem] = [] - self._normal_lookup: Dict[Any, PayloadType] = {} + self._first: list[_PayloadRegistryItem] = [] + self._normal: list[_PayloadRegistryItem] = [] + self._last: list[_PayloadRegistryItem] = [] + self._normal_lookup: dict[Any, PayloadType] = {} def get( self, data: Any, *args: Any, - _CHAIN: "Type[chain[_PayloadRegistryItem]]" = chain, + _CHAIN: "type[chain[_PayloadRegistryItem]]" = chain, **kwargs: Any, ) -> "Payload": if self._first: @@ -151,19 +138,19 @@ def register( class Payload(ABC): _default_content_type: str = "application/octet-stream" - _size: Optional[int] = None + _size: int | None = None _consumed: bool = False # Default: payload has not been consumed yet _autoclose: bool = False # Default: assume resource needs explicit closing def __init__( self, value: Any, - headers: Optional[ - 
Union[_CIMultiDict, Dict[str, str], Iterable[Tuple[str, str]]] - ] = None, - content_type: Union[str, None, _SENTINEL] = sentinel, - filename: Optional[str] = None, - encoding: Optional[str] = None, + headers: ( + _CIMultiDict | dict[str, str] | Iterable[tuple[str, str]] | None + ) = None, + content_type: str | None | _SENTINEL = sentinel, + filename: str | None = None, + encoding: str | None = None, **kwargs: Any, ) -> None: self._encoding = encoding @@ -187,7 +174,7 @@ def __init__( self._headers.update(headers) @property - def size(self) -> Optional[int]: + def size(self) -> int | None: """Size of the payload in bytes. Returns the number of bytes that will be transmitted when the payload @@ -197,7 +184,7 @@ def size(self) -> Optional[int]: return self._size @property - def filename(self) -> Optional[str]: + def filename(self) -> str | None: """Filename of the payload.""" return self._filename @@ -216,7 +203,7 @@ def _binary_headers(self) -> bytes: ) @property - def encoding(self) -> Optional[str]: + def encoding(self) -> str | None: """Payload encoding""" return self._encoding @@ -283,7 +270,7 @@ async def write(self, writer: AbstractStreamWriter) -> None: # write_with_length is new in aiohttp 3.12 # it should be overridden by subclasses async def write_with_length( - self, writer: AbstractStreamWriter, content_length: Optional[int] + self, writer: AbstractStreamWriter, content_length: int | None ) -> None: """ Write payload with a specific content length constraint. @@ -353,7 +340,7 @@ class BytesPayload(Payload): _autoclose = True # No file handle, just bytes in memory def __init__( - self, value: Union[bytes, bytearray, memoryview], *args: Any, **kwargs: Any + self, value: bytes | bytearray | memoryview, *args: Any, **kwargs: Any ) -> None: if "content_type" not in kwargs: kwargs["content_type"] = "application/octet-stream" @@ -407,7 +394,7 @@ async def write(self, writer: AbstractStreamWriter) -> None: await writer.write(self._value) async def write_with_length( - self, writer: AbstractStreamWriter, content_length: Optional[int] + self, writer: AbstractStreamWriter, content_length: int | None ) -> None: """ Write bytes payload with a specific content length constraint. @@ -432,8 +419,8 @@ def __init__( self, value: str, *args: Any, - encoding: Optional[str] = None, - content_type: Optional[str] = None, + encoding: str | None = None, + content_type: str | None = None, **kwargs: Any, ) -> None: @@ -466,7 +453,7 @@ def __init__(self, value: IO[str], *args: Any, **kwargs: Any) -> None: class IOBasePayload(Payload): _value: io.IOBase # _consumed = False (inherited) - File can be re-read from the same position - _start_position: Optional[int] = None + _start_position: int | None = None # _autoclose = False (inherited) - Has file handle that needs explicit closing def __init__( @@ -496,8 +483,8 @@ def _set_or_restore_start_position(self) -> None: self._consumed = True def _read_and_available_len( - self, remaining_content_len: Optional[int] - ) -> Tuple[Optional[int], bytes]: + self, remaining_content_len: int | None + ) -> tuple[int | None, bytes]: """ Read the file-like object and return both its total size and the first chunk. @@ -521,7 +508,7 @@ def _read_and_available_len( min(READ_SIZE, size or READ_SIZE, remaining_content_len or READ_SIZE) ) - def _read(self, remaining_content_len: Optional[int]) -> bytes: + def _read(self, remaining_content_len: int | None) -> bytes: """ Read a chunk of data from the file-like object. 
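The `_read*` helpers changed here cap every read by the module's READ_SIZE constant and by the remaining content length, with None meaning "no Content-Length constraint". A minimal sketch of that capping logic (hypothetical function, assuming the 64 KiB READ_SIZE defined in this file):

    import io

    READ_SIZE = 2**16  # 64 KiB, mirroring aiohttp/payload.py

    def bounded_read(f: io.BufferedIOBase, remaining: int | None) -> bytes:
        # None means unbounded: cap only by the fixed read size.
        limit = READ_SIZE if remaining is None else min(READ_SIZE, remaining)
        return f.read(limit)

    buf = io.BytesIO(b"x" * 100_000)
    assert len(bounded_read(buf, None)) == READ_SIZE  # capped by READ_SIZE
    assert len(bounded_read(buf, 10)) == 10           # capped by remaining bytes
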
@@ -540,7 +527,7 @@ def _read(self, remaining_content_len: Optional[int]) -> bytes: return self._value.read(remaining_content_len or READ_SIZE) # type: ignore[no-any-return] @property - def size(self) -> Optional[int]: + def size(self) -> int | None: """ Size of the payload in bytes. @@ -586,7 +573,7 @@ async def write(self, writer: AbstractStreamWriter) -> None: await self.write_with_length(writer, None) async def write_with_length( - self, writer: AbstractStreamWriter, content_length: Optional[int] + self, writer: AbstractStreamWriter, content_length: int | None ) -> None: """ Write file-like payload with a specific content length constraint. @@ -648,9 +635,9 @@ async def write_with_length( def _should_stop_writing( self, - available_len: Optional[int], + available_len: int | None, total_written_len: int, - remaining_content_len: Optional[int], + remaining_content_len: int | None, ) -> bool: """ Determine if we should stop writing data. @@ -736,8 +723,8 @@ def __init__( self, value: TextIO, *args: Any, - encoding: Optional[str] = None, - content_type: Optional[str] = None, + encoding: str | None = None, + content_type: str | None = None, **kwargs: Any, ) -> None: @@ -761,8 +748,8 @@ def __init__( ) def _read_and_available_len( - self, remaining_content_len: Optional[int] - ) -> Tuple[Optional[int], bytes]: + self, remaining_content_len: int | None + ) -> tuple[int | None, bytes]: """ Read the text file-like object and return both its total size and the first chunk. @@ -791,7 +778,7 @@ def _read_and_available_len( ) return size, chunk.encode(self._encoding) if self._encoding else chunk.encode() - def _read(self, remaining_content_len: Optional[int]) -> bytes: + def _read(self, remaining_content_len: int | None) -> bytes: """ Read a chunk of data from the text file-like object. @@ -869,7 +856,7 @@ async def write(self, writer: AbstractStreamWriter) -> None: return await self.write_with_length(writer, None) async def write_with_length( - self, writer: AbstractStreamWriter, content_length: Optional[int] + self, writer: AbstractStreamWriter, content_length: int | None ) -> None: """ Write BytesIO payload with a specific content length constraint. @@ -957,7 +944,7 @@ def __init__( if TYPE_CHECKING: - from typing import AsyncIterable, AsyncIterator + from collections.abc import AsyncIterable, AsyncIterator _AsyncIterator = AsyncIterator[bytes] _AsyncIterable = AsyncIterable[bytes] @@ -970,9 +957,9 @@ def __init__( class AsyncIterablePayload(Payload): - _iter: Optional[_AsyncIterator] = None + _iter: _AsyncIterator | None = None _value: _AsyncIterable - _cached_chunks: Optional[List[bytes]] = None + _cached_chunks: list[bytes] | None = None # _consumed stays False to allow reuse with cached content _autoclose = True # Iterator doesn't need explicit closing @@ -981,7 +968,7 @@ def __init__(self, value: _AsyncIterable, *args: Any, **kwargs: Any) -> None: raise TypeError( "value argument must support " "collections.abc.AsyncIterable interface, " - "got {!r}".format(type(value)) + f"got {type(value)!r}" ) if "content_type" not in kwargs: @@ -1009,7 +996,7 @@ async def write(self, writer: AbstractStreamWriter) -> None: await self.write_with_length(writer, None) async def write_with_length( - self, writer: AbstractStreamWriter, content_length: Optional[int] + self, writer: AbstractStreamWriter, content_length: int | None ) -> None: """ Write async iterable payload with a specific content length constraint. 
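With the version gate removed (the next hunk drops the `sys.version_info >= (3, 10)` branch), the builtin `anext()` can be called unconditionally. A minimal sketch of draining an async iterable under an optional byte budget, in the spirit of this method (names hypothetical, Python 3.10+):

    import asyncio
    from collections.abc import AsyncIterator

    async def chunks() -> AsyncIterator[bytes]:
        for part in (b"alpha", b"beta", b"gamma"):
            yield part

    async def drain(limit: int | None = None) -> bytes:
        it = chunks()
        out = bytearray()
        while True:
            try:
                chunk = await anext(it)  # builtin since Python 3.10
            except StopAsyncIteration:
                break
            if limit is not None:
                chunk = chunk[: limit - len(out)]  # honor the byte budget
            out += chunk
            if limit is not None and len(out) >= limit:
                break
        return bytes(out)

    assert asyncio.run(drain()) == b"alphabetagamma"
    assert asyncio.run(drain(7)) == b"alphabe"
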
@@ -1048,10 +1035,7 @@ async def write_with_length( try: while True: - if sys.version_info >= (3, 10): - chunk = await anext(self._iter) - else: - chunk = await self._iter.__anext__() + chunk = await anext(self._iter) if remaining_bytes is None: await writer.write(chunk) # If we have a content length limit @@ -1089,7 +1073,7 @@ async def as_bytes(self, encoding: str = "utf-8", errors: str = "strict") -> byt return b"" # Read all chunks and cache them - chunks: List[bytes] = [] + chunks: list[bytes] = [] async for chunk in self._iter: chunks.append(chunk) diff --git a/aiohttp/payload_streamer.py b/aiohttp/payload_streamer.py index 831fdc0a77f..67afedf2bc1 100644 --- a/aiohttp/payload_streamer.py +++ b/aiohttp/payload_streamer.py @@ -24,7 +24,8 @@ async def file_sender(writer, file_name=None): import types import warnings -from typing import Any, Awaitable, Callable, Dict, Tuple +from collections.abc import Awaitable, Callable +from typing import Any from .abc import AbstractStreamWriter from .payload import Payload, payload_type @@ -36,8 +37,8 @@ class _stream_wrapper: def __init__( self, coro: Callable[..., Awaitable[None]], - args: Tuple[Any, ...], - kwargs: Dict[str, Any], + args: tuple[Any, ...], + kwargs: dict[str, Any], ) -> None: self.coro = types.coroutine(coro) self.args = args diff --git a/aiohttp/pytest_plugin.py b/aiohttp/pytest_plugin.py index 7d59fe820d6..7ea6be2f335 100644 --- a/aiohttp/pytest_plugin.py +++ b/aiohttp/pytest_plugin.py @@ -2,17 +2,8 @@ import contextlib import inspect import warnings -from typing import ( - Any, - Awaitable, - Callable, - Dict, - Iterator, - Optional, - Protocol, - Union, - overload, -) +from collections.abc import Awaitable, Callable, Iterator +from typing import Any, Protocol, overload import pytest @@ -41,7 +32,7 @@ async def __call__( self, __param: Application, *, - server_kwargs: Optional[Dict[str, Any]] = None, + server_kwargs: dict[str, Any] | None = None, **kwargs: Any, ) -> TestClient[Request, Application]: ... @overload @@ -49,20 +40,20 @@ async def __call__( self, __param: BaseTestServer, *, - server_kwargs: Optional[Dict[str, Any]] = None, + server_kwargs: dict[str, Any] | None = None, **kwargs: Any, ) -> TestClient[BaseRequest, None]: ... class AiohttpServer(Protocol): def __call__( - self, app: Application, *, port: Optional[int] = None, **kwargs: Any + self, app: Application, *, port: int | None = None, **kwargs: Any ) -> Awaitable[TestServer]: ... class AiohttpRawServer(Protocol): def __call__( - self, handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any + self, handler: _RequestHandler, *, port: int | None = None, **kwargs: Any ) -> Awaitable[RawTestServer]: ... 
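These fixture factories are typed as callback Protocols, so any coroutine function with a matching signature satisfies them structurally, no inheritance required. A minimal sketch of the pattern (the names below are hypothetical, not the plugin's API):

    import asyncio
    from collections.abc import Awaitable
    from typing import Any, Protocol

    class ServerFactory(Protocol):
        def __call__(
            self, name: str, *, port: int | None = None, **kwargs: Any
        ) -> Awaitable[str]: ...

    async def make_server(name: str, *, port: int | None = None, **kwargs: Any) -> str:
        # Structurally compatible: same positional/keyword shape and return type.
        return f"{name}:{port or 0}"

    factory: ServerFactory = make_server  # accepted by static type checkers
    assert asyncio.run(factory("demo", port=8080)) == "demo:8080"
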
@@ -168,7 +159,7 @@ def _runtime_warning_context(): # type: ignore[no-untyped-def] with warnings.catch_warnings(record=True) as _warnings: yield rw = [ - "{w.filename}:{w.lineno}:{w.message}".format(w=w) + f"{w.filename}:{w.lineno}:{w.message}" for w in _warnings if w.category == RuntimeWarning ] @@ -308,7 +299,7 @@ async def go( app: Application, *, host: str = "127.0.0.1", - port: Optional[int] = None, + port: int | None = None, **kwargs: Any, ) -> TestServer: server = TestServer(app, host=host, port=port) @@ -344,7 +335,7 @@ def aiohttp_raw_server(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpRawSe servers = [] async def go( - handler: _RequestHandler, *, port: Optional[int] = None, **kwargs: Any + handler: _RequestHandler, *, port: int | None = None, **kwargs: Any ) -> RawTestServer: server = RawTestServer(handler, port=port) await server.start_server(loop=loop, **kwargs) @@ -386,7 +377,7 @@ def aiohttp_client(loop: asyncio.AbstractEventLoop) -> Iterator[AiohttpClient]: async def go( __param: Application, *, - server_kwargs: Optional[Dict[str, Any]] = None, + server_kwargs: dict[str, Any] | None = None, **kwargs: Any, ) -> TestClient[Request, Application]: ... @@ -394,14 +385,14 @@ async def go( async def go( __param: BaseTestServer, *, - server_kwargs: Optional[Dict[str, Any]] = None, + server_kwargs: dict[str, Any] | None = None, **kwargs: Any, ) -> TestClient[BaseRequest, None]: ... async def go( - __param: Union[Application, BaseTestServer], + __param: Application | BaseTestServer, *args: Any, - server_kwargs: Optional[Dict[str, Any]] = None, + server_kwargs: dict[str, Any] | None = None, **kwargs: Any, ) -> TestClient[Any, Any]: if isinstance(__param, Callable) and not isinstance( # type: ignore[arg-type] diff --git a/aiohttp/resolver.py b/aiohttp/resolver.py index b20e5672ce5..85c36da0c31 100644 --- a/aiohttp/resolver.py +++ b/aiohttp/resolver.py @@ -1,7 +1,7 @@ import asyncio import socket import weakref -from typing import Any, Dict, Final, List, Optional, Tuple, Type, Union +from typing import Any, Final, Optional from .abc import AbstractResolver, ResolveResult @@ -31,12 +31,12 @@ class ThreadedResolver(AbstractResolver): concurrent.futures.ThreadPoolExecutor is used by default. 
""" - def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: + def __init__(self, loop: asyncio.AbstractEventLoop | None = None) -> None: self._loop = loop or asyncio.get_running_loop() async def resolve( self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET - ) -> List[ResolveResult]: + ) -> list[ResolveResult]: infos = await self._loop.getaddrinfo( host, port, @@ -45,7 +45,7 @@ async def resolve( flags=_AI_ADDRCONFIG, ) - hosts: List[ResolveResult] = [] + hosts: list[ResolveResult] = [] for family, _, proto, _, address in infos: if family == socket.AF_INET6: if len(address) < 3: @@ -87,7 +87,7 @@ class AsyncResolver(AbstractResolver): def __init__( self, - loop: Optional[asyncio.AbstractEventLoop] = None, + loop: asyncio.AbstractEventLoop | None = None, *args: Any, **kwargs: Any, ) -> None: @@ -95,7 +95,7 @@ def __init__( raise RuntimeError("Resolver requires aiodns library") self._loop = loop or asyncio.get_running_loop() - self._manager: Optional[_DNSResolverManager] = None + self._manager: _DNSResolverManager | None = None # If custom args are provided, create a dedicated resolver instance # This means each AsyncResolver with custom args gets its own # aiodns.DNSResolver instance @@ -112,7 +112,7 @@ def __init__( async def resolve( self, host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET - ) -> List[ResolveResult]: + ) -> list[ResolveResult]: try: resp = await self._resolver.getaddrinfo( host, @@ -124,9 +124,9 @@ async def resolve( except aiodns.error.DNSError as exc: msg = exc.args[1] if len(exc.args) >= 1 else "DNS lookup failed" raise OSError(None, msg) from exc - hosts: List[ResolveResult] = [] + hosts: list[ResolveResult] = [] for node in resp.nodes: - address: Union[Tuple[bytes, int], Tuple[bytes, int, int, int]] = node.addr + address: tuple[bytes, int] | tuple[bytes, int, int, int] = node.addr family = node.family if family == socket.AF_INET6: if len(address) > 3 and address[3]: @@ -163,7 +163,7 @@ async def resolve( async def _resolve_with_query( self, host: str, port: int = 0, family: int = socket.AF_INET - ) -> List[Dict[str, Any]]: + ) -> list[dict[str, Any]]: qtype: Final = "AAAA" if family == socket.AF_INET6 else "A" try: @@ -222,7 +222,7 @@ def _init(self) -> None: # Use WeakKeyDictionary to allow event loops to be garbage collected self._loop_data: weakref.WeakKeyDictionary[ asyncio.AbstractEventLoop, - tuple["aiodns.DNSResolver", weakref.WeakSet["AsyncResolver"]], + tuple[aiodns.DNSResolver, weakref.WeakSet[AsyncResolver]], ] = weakref.WeakKeyDictionary() def get_resolver( @@ -238,7 +238,7 @@ def get_resolver( # Create a new resolver and client set for this loop if it doesn't exist if loop not in self._loop_data: resolver = aiodns.DNSResolver(loop=loop) - client_set: weakref.WeakSet["AsyncResolver"] = weakref.WeakSet() + client_set: weakref.WeakSet[AsyncResolver] = weakref.WeakSet() self._loop_data[loop] = (resolver, client_set) else: # Get the existing resolver and client set @@ -270,5 +270,5 @@ def release_resolver( del self._loop_data[loop] -_DefaultType = Type[Union[AsyncResolver, ThreadedResolver]] +_DefaultType = type[AsyncResolver | ThreadedResolver] DefaultResolver: _DefaultType = AsyncResolver if aiodns_default else ThreadedResolver diff --git a/aiohttp/streams.py b/aiohttp/streams.py index e2bc04dd99c..6c147953d0d 100644 --- a/aiohttp/streams.py +++ b/aiohttp/streams.py @@ -1,17 +1,8 @@ import asyncio import collections import warnings -from typing import ( - Awaitable, - Callable, - Deque, - 
Final, - Generic, - List, - Optional, - Tuple, - TypeVar, -) +from collections.abc import Awaitable, Callable +from typing import Final, Generic, TypeVar from .base_protocol import BaseProtocol from .helpers import ( @@ -67,7 +58,7 @@ def __init__(self, stream: "StreamReader") -> None: def __aiter__(self) -> "ChunkTupleAsyncStreamIterator": return self - async def __anext__(self) -> Tuple[bytes, bool]: + async def __anext__(self) -> tuple[bytes, bool]: rv = await self._stream.readchunk() if rv == (b"", False): raise StopAsyncIteration @@ -138,8 +129,8 @@ def __init__( protocol: BaseProtocol, limit: int, *, - timer: Optional[BaseTimerContext] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, + timer: BaseTimerContext | None = None, + loop: asyncio.AbstractEventLoop | None = None, ) -> None: self._protocol = protocol self._low_water = limit @@ -149,18 +140,18 @@ def __init__( self._loop = loop self._size = 0 self._cursor = 0 - self._http_chunk_splits: Optional[List[int]] = None - self._buffer: Deque[bytes] = collections.deque() + self._http_chunk_splits: list[int] | None = None + self._buffer: collections.deque[bytes] = collections.deque() self._buffer_offset = 0 self._eof = False - self._waiter: Optional[asyncio.Future[None]] = None - self._eof_waiter: Optional[asyncio.Future[None]] = None - self._exception: Optional[BaseException] = None + self._waiter: asyncio.Future[None] | None = None + self._eof_waiter: asyncio.Future[None] | None = None + self._exception: BaseException | None = None self._timer = TimerNoop() if timer is None else timer - self._eof_callbacks: List[Callable[[], None]] = [] + self._eof_callbacks: list[Callable[[], None]] = [] self._eof_counter = 0 self.total_bytes = 0 - self.total_compressed_bytes: Optional[int] = None + self.total_compressed_bytes: int | None = None def __repr__(self) -> str: info = [self.__class__.__name__] @@ -176,10 +167,10 @@ def __repr__(self) -> str: info.append("e=%r" % self._exception) return "<%s>" % " ".join(info) - def get_read_buffer_limits(self) -> Tuple[int, int]: + def get_read_buffer_limits(self) -> tuple[int, int]: return (self._low_water, self._high_water) - def exception(self) -> Optional[BaseException]: + def exception(self) -> BaseException | None: return self._exception def set_exception( @@ -449,7 +440,7 @@ async def readany(self) -> bytes: return self._read_nowait(-1) - async def readchunk(self) -> Tuple[bytes, bool]: + async def readchunk(self) -> tuple[bytes, bool]: """Returns a tuple of (data, end_of_http_chunk). 
When chunked transfer @@ -487,7 +478,7 @@ async def readexactly(self, n: int) -> bytes: if self._exception is not None: raise self._exception - blocks: List[bytes] = [] + blocks: list[bytes] = [] while n > 0: block = await self.read(n) if not block: @@ -568,7 +559,7 @@ def __init__(self) -> None: def __repr__(self) -> str: return "<%s>" % self.__class__.__name__ - def exception(self) -> Optional[BaseException]: + def exception(self) -> BaseException | None: return None def set_exception( @@ -610,7 +601,7 @@ async def read(self, n: int = -1) -> bytes: async def readany(self) -> bytes: return b"" - async def readchunk(self) -> Tuple[bytes, bool]: + async def readchunk(self) -> tuple[bytes, bool]: if not self._read_eof_chunk: self._read_eof_chunk = True return (b"", False) @@ -633,9 +624,9 @@ class DataQueue(Generic[_T]): def __init__(self, loop: asyncio.AbstractEventLoop) -> None: self._loop = loop self._eof = False - self._waiter: Optional[asyncio.Future[None]] = None - self._exception: Optional[BaseException] = None - self._buffer: Deque[Tuple[_T, int]] = collections.deque() + self._waiter: asyncio.Future[None] | None = None + self._exception: BaseException | None = None + self._buffer: collections.deque[tuple[_T, int]] = collections.deque() def __len__(self) -> int: return len(self._buffer) @@ -646,7 +637,7 @@ def is_eof(self) -> bool: def at_eof(self) -> bool: return self._eof and not self._buffer - def exception(self) -> Optional[BaseException]: + def exception(self) -> BaseException | None: return self._exception def set_exception( diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py index 87c31427867..7b1b7103cef 100644 --- a/aiohttp/test_utils.py +++ b/aiohttp/test_utils.py @@ -10,20 +10,9 @@ import sys import warnings from abc import ABC, abstractmethod +from collections.abc import Callable, Iterator from types import TracebackType -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Generic, - Iterator, - List, - Optional, - Type, - TypeVar, - cast, - overload, -) +from typing import TYPE_CHECKING, Any, Generic, TypeVar, cast, overload from unittest import IsolatedAsyncioTestCase, mock from aiosignal import Signal @@ -61,7 +50,7 @@ if TYPE_CHECKING: from ssl import SSLContext else: - SSLContext = None + SSLContext = Any if sys.version_info >= (3, 11) and TYPE_CHECKING: from typing import Unpack @@ -110,9 +99,9 @@ def __init__( self, *, scheme: str = "", - loop: Optional[asyncio.AbstractEventLoop] = None, + loop: asyncio.AbstractEventLoop | None = None, host: str = "127.0.0.1", - port: Optional[int] = None, + port: int | None = None, skip_url_asserts: bool = False, socket_factory: Callable[ [str, int, socket.AddressFamily], socket.socket @@ -120,8 +109,8 @@ def __init__( **kwargs: Any, ) -> None: self._loop = loop - self.runner: Optional[BaseRunner] = None - self._root: Optional[URL] = None + self.runner: BaseRunner | None = None + self._root: URL | None = None self.host = host self.port = port self._closed = False @@ -130,7 +119,7 @@ def __init__( self.socket_factory = socket_factory async def start_server( - self, loop: Optional[asyncio.AbstractEventLoop] = None, **kwargs: Any + self, loop: asyncio.AbstractEventLoop | None = None, **kwargs: Any ) -> None: if self.runner: return @@ -215,9 +204,9 @@ def __enter__(self) -> None: def __exit__( self, - exc_type: Optional[Type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | 
None, ) -> None: # __exit__ should exist in pair with __enter__ but never executed pass # pragma: no cover @@ -228,9 +217,9 @@ async def __aenter__(self) -> "BaseTestServer": async def __aexit__( self, - exc_type: Optional[Type[BaseException]], - exc_value: Optional[BaseException], - traceback: Optional[TracebackType], + exc_type: type[BaseException] | None, + exc_value: BaseException | None, + traceback: TracebackType | None, ) -> None: await self.close() @@ -242,7 +231,7 @@ def __init__( *, scheme: str = "", host: str = "127.0.0.1", - port: Optional[int] = None, + port: int | None = None, **kwargs: Any, ): self.app = app @@ -259,7 +248,7 @@ def __init__( *, scheme: str = "", host: str = "127.0.0.1", - port: Optional[int] = None, + port: int | None = None, **kwargs: Any, ) -> None: self._handler = handler @@ -285,7 +274,7 @@ def __init__( self: "TestClient[Request, Application]", server: TestServer, *, - cookie_jar: Optional[AbstractCookieJar] = None, + cookie_jar: AbstractCookieJar | None = None, **kwargs: Any, ) -> None: ... @overload @@ -293,15 +282,15 @@ def __init__( self: "TestClient[_Request, None]", server: BaseTestServer, *, - cookie_jar: Optional[AbstractCookieJar] = None, + cookie_jar: AbstractCookieJar | None = None, **kwargs: Any, ) -> None: ... def __init__( self, server: BaseTestServer, *, - cookie_jar: Optional[AbstractCookieJar] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, + cookie_jar: AbstractCookieJar | None = None, + loop: asyncio.AbstractEventLoop | None = None, **kwargs: Any, ) -> None: if not isinstance(server, BaseTestServer): @@ -315,8 +304,8 @@ def __init__( self._session = ClientSession(loop=loop, cookie_jar=cookie_jar, **kwargs) self._session._retry_connection = False self._closed = False - self._responses: List[ClientResponse] = [] - self._websockets: List[ClientWebSocketResponse] = [] + self._responses: list[ClientResponse] = [] + self._websockets: list[ClientWebSocketResponse] = [] async def start_server(self) -> None: await self._server.start_server(loop=self._loop) @@ -326,7 +315,7 @@ def host(self) -> str: return self._server.host @property - def port(self) -> Optional[int]: + def port(self) -> int | None: return self._server.port @property @@ -496,9 +485,9 @@ def __enter__(self) -> None: def __exit__( self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: Optional[TracebackType], + exc_type: type[BaseException] | None, + exc: BaseException | None, + tb: TracebackType | None, ) -> None: # __exit__ should exist in pair with __enter__ but never executed pass # pragma: no cover @@ -509,9 +498,9 @@ async def __aenter__(self) -> Self: async def __aexit__( self, - exc_type: Optional[Type[BaseException]], - exc: Optional[BaseException], - tb: Optional[TracebackType], + exc_type: type[BaseException] | None, + exc: BaseException | None, + tb: TracebackType | None, ) -> None: await self.close() @@ -648,10 +637,10 @@ def set_dict(app: Any, key: str, value: Any) -> None: return app -def _create_transport(sslcontext: Optional[SSLContext] = None) -> mock.Mock: +def _create_transport(sslcontext: SSLContext | None = None) -> mock.Mock: transport = mock.Mock() - def get_extra_info(key: str) -> Optional[SSLContext]: + def get_extra_info(key: str) -> SSLContext | None: if key == "sslcontext": return sslcontext else: @@ -674,7 +663,7 @@ def make_mocked_request( protocol: Any = sentinel, transport: Any = sentinel, payload: StreamReader = EMPTY_PAYLOAD, - sslcontext: Optional[SSLContext] = None, + sslcontext: SSLContext | None = 
None, client_max_size: int = 1024**2, loop: Any = ..., ) -> Request: diff --git a/aiohttp/tracing.py b/aiohttp/tracing.py index 568fa7f9e38..2f66878d2ee 100644 --- a/aiohttp/tracing.py +++ b/aiohttp/tracing.py @@ -1,5 +1,6 @@ +from collections.abc import Mapping from types import SimpleNamespace -from typing import TYPE_CHECKING, Mapping, Optional, Type, TypeVar +from typing import TYPE_CHECKING, TypeVar import attr from aiosignal import Signal @@ -40,7 +41,7 @@ class TraceConfig: """First-class used to trace requests launched via ClientSession objects.""" def __init__( - self, trace_config_ctx_factory: Type[SimpleNamespace] = SimpleNamespace + self, trace_config_ctx_factory: type[SimpleNamespace] = SimpleNamespace ) -> None: self._on_request_start: _TracingSignal[TraceRequestStartParams] = Signal(self) self._on_request_chunk_sent: _TracingSignal[TraceRequestChunkSentParams] = ( @@ -86,7 +87,7 @@ def __init__( self._trace_config_ctx_factory = trace_config_ctx_factory def trace_config_ctx( - self, trace_request_ctx: Optional[Mapping[str, str]] = None + self, trace_request_ctx: Mapping[str, str] | None = None ) -> SimpleNamespace: """Return a new trace_config_ctx instance""" return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx) diff --git a/aiohttp/typedefs.py b/aiohttp/typedefs.py index cc8c0825b4e..dd7ad257460 100644 --- a/aiohttp/typedefs.py +++ b/aiohttp/typedefs.py @@ -1,16 +1,7 @@ import json import os -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - Iterable, - Mapping, - Protocol, - Tuple, - Union, -) +from collections.abc import Awaitable, Callable, Iterable, Mapping +from typing import TYPE_CHECKING, Any, Protocol, Union from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr from yarl import URL, Query as _Query @@ -42,14 +33,14 @@ Mapping[istr, str], _CIMultiDict, _CIMultiDictProxy, - Iterable[Tuple[Union[str, istr], str]], + Iterable[tuple[str | istr, str]], ] -RawHeaders = Tuple[Tuple[bytes, bytes], ...] +RawHeaders = tuple[tuple[bytes, bytes], ...] 
StrOrURL = Union[str, URL] LooseCookiesMappings = Mapping[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]] LooseCookiesIterables = Iterable[ - Tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]] + tuple[str, Union[str, "BaseCookie[str]", "Morsel[Any]"]] ] LooseCookies = Union[ LooseCookiesMappings, diff --git a/aiohttp/web.py b/aiohttp/web.py index 8307ff405ca..a77447d3a2e 100644 --- a/aiohttp/web.py +++ b/aiohttp/web.py @@ -5,22 +5,10 @@ import sys import warnings from argparse import ArgumentParser -from collections.abc import Iterable +from collections.abc import Awaitable, Callable, Iterable, Iterable as TypingIterable from contextlib import suppress from importlib import import_module -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - Iterable as TypingIterable, - List, - Optional, - Set, - Type, - Union, - cast, -) +from typing import TYPE_CHECKING, Any, cast from .abc import AbstractAccessLogger from .helpers import AppKey as AppKey @@ -303,23 +291,23 @@ async def _run_app( - app: Union[Application, Awaitable[Application]], + app: Application | Awaitable[Application], *, - host: Optional[Union[str, HostSequence]] = None, - port: Optional[int] = None, - path: Union[PathLike, TypingIterable[PathLike], None] = None, - sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None, + host: str | HostSequence | None = None, + port: int | None = None, + path: PathLike | TypingIterable[PathLike] | None = None, + sock: socket.socket | TypingIterable[socket.socket] | None = None, shutdown_timeout: float = 60.0, keepalive_timeout: float = 75.0, - ssl_context: Optional[SSLContext] = None, - print: Optional[Callable[..., None]] = print, + ssl_context: SSLContext | None = None, + print: Callable[..., None] | None = print, backlog: int = 128, - access_log_class: Type[AbstractAccessLogger] = AccessLogger, + access_log_class: type[AbstractAccessLogger] = AccessLogger, access_log_format: str = AccessLogger.LOG_FORMAT, - access_log: Optional[logging.Logger] = access_logger, + access_log: logging.Logger | None = access_logger, handle_signals: bool = True, - reuse_address: Optional[bool] = None, - reuse_port: Optional[bool] = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, handler_cancellation: bool = False, ) -> None: # An internal function to actually do all dirty job for application running @@ -341,7 +329,7 @@ async def _run_app( await runner.setup() - sites: List[BaseSite] = [] + sites: list[BaseSite] = [] try: if host is not None: @@ -441,7 +429,7 @@ async def _run_app( def _cancel_tasks( - to_cancel: Set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop + to_cancel: set["asyncio.Task[Any]"], loop: asyncio.AbstractEventLoop ) -> None: if not to_cancel: return @@ -465,25 +453,25 @@ def _cancel_tasks( def run_app( - app: Union[Application, Awaitable[Application]], + app: Application | Awaitable[Application], *, - host: Optional[Union[str, HostSequence]] = None, - port: Optional[int] = None, - path: Union[PathLike, TypingIterable[PathLike], None] = None, - sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None, + host: str | HostSequence | None = None, + port: int | None = None, + path: PathLike | TypingIterable[PathLike] | None = None, + sock: socket.socket | TypingIterable[socket.socket] | None = None, shutdown_timeout: float = 60.0, keepalive_timeout: float = 75.0, - ssl_context: Optional[SSLContext] = None, - print: Optional[Callable[..., None]] = print, + ssl_context: SSLContext | None = None, + print: 
Callable[..., None] | None = print, backlog: int = 128, - access_log_class: Type[AbstractAccessLogger] = AccessLogger, + access_log_class: type[AbstractAccessLogger] = AccessLogger, access_log_format: str = AccessLogger.LOG_FORMAT, - access_log: Optional[logging.Logger] = access_logger, + access_log: logging.Logger | None = access_logger, handle_signals: bool = True, - reuse_address: Optional[bool] = None, - reuse_port: Optional[bool] = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, handler_cancellation: bool = False, - loop: Optional[asyncio.AbstractEventLoop] = None, + loop: asyncio.AbstractEventLoop | None = None, ) -> None: """Run an app locally""" if loop is None: @@ -534,7 +522,7 @@ def run_app( loop.close() -def main(argv: List[str]) -> None: +def main(argv: list[str]) -> None: arg_parser = ArgumentParser( description="aiohttp.web Application server", prog="aiohttp.web" ) diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py index 619c0085da1..432ee100804 100644 --- a/aiohttp/web_app.py +++ b/aiohttp/web_app.py @@ -1,28 +1,18 @@ import asyncio import logging import warnings -from functools import lru_cache, partial, update_wrapper -from typing import ( - TYPE_CHECKING, - Any, +from collections.abc import ( AsyncIterator, Awaitable, Callable, - Dict, Iterable, Iterator, - List, Mapping, MutableMapping, - Optional, Sequence, - Tuple, - Type, - TypeVar, - Union, - cast, - overload, ) +from functools import lru_cache, partial, update_wrapper +from typing import TYPE_CHECKING, Any, Optional, TypeVar, cast, overload from aiosignal import Signal from frozenlist import FrozenList @@ -65,15 +55,15 @@ _AppSignal = Signal["Application"] _RespPrepareSignal = Signal[Request, StreamResponse] _Middlewares = FrozenList[Middleware] - _MiddlewaresHandlers = Optional[Sequence[Tuple[Middleware, bool]]] - _Subapps = List["Application"] + _MiddlewaresHandlers = Optional[Sequence[tuple[Middleware, bool]]] + _Subapps = list["Application"] else: # No type checker mode, skip types _AppSignal = Signal _RespPrepareSignal = Signal _Middlewares = FrozenList _MiddlewaresHandlers = Optional[Sequence] - _Subapps = List + _Subapps = list _T = TypeVar("_T") _U = TypeVar("_U") @@ -81,7 +71,7 @@ def _build_middlewares( - handler: Handler, apps: Tuple["Application", ...] + handler: Handler, apps: tuple["Application", ...] 
) -> Callable[[Request], Awaitable[StreamResponse]]: """Apply middlewares to handler.""" for app in apps[::-1]: @@ -93,7 +83,7 @@ def _build_middlewares( _cached_build_middleware = lru_cache(maxsize=1024)(_build_middlewares) -class Application(MutableMapping[Union[str, AppKey[Any]], Any]): +class Application(MutableMapping[str | AppKey[Any], Any]): ATTRS = frozenset( [ "logger", @@ -122,11 +112,11 @@ def __init__( self, *, logger: logging.Logger = web_logger, - router: Optional[UrlDispatcher] = None, + router: UrlDispatcher | None = None, middlewares: Iterable[Middleware] = (), - handler_args: Optional[Mapping[str, Any]] = None, + handler_args: Mapping[str, Any] | None = None, client_max_size: int = 1024**2, - loop: Optional[asyncio.AbstractEventLoop] = None, + loop: asyncio.AbstractEventLoop | None = None, debug: Any = ..., # mypy doesn't support ellipsis ) -> None: if router is None: @@ -157,10 +147,10 @@ def __init__( # initialized on freezing self._middlewares_handlers: _MiddlewaresHandlers = None # initialized on freezing - self._run_middlewares: Optional[bool] = None + self._run_middlewares: bool | None = None self._has_legacy_middlewares: bool = True - self._state: Dict[Union[AppKey[Any], str], object] = {} + self._state: dict[AppKey[Any] | str, object] = {} self._frozen = False self._pre_frozen = False self._subapps: _Subapps = [] @@ -174,10 +164,9 @@ def __init__( self._on_cleanup.append(self._cleanup_ctx._on_cleanup) self._client_max_size = client_max_size - def __init_subclass__(cls: Type["Application"]) -> None: + def __init_subclass__(cls: type["Application"]) -> None: warnings.warn( - "Inheritance class {} from web.Application " - "is discouraged".format(cls.__name__), + f"Inheritance class {cls.__name__} from web.Application " "is discouraged", DeprecationWarning, stacklevel=3, ) @@ -187,8 +176,8 @@ def __init_subclass__(cls: Type["Application"]) -> None: def __setattr__(self, name: str, val: Any) -> None: if name not in self.ATTRS: warnings.warn( - "Setting custom web.Application.{} attribute " - "is discouraged".format(name), + f"Setting custom web.Application.{name} attribute " + "is discouraged", DeprecationWarning, stacklevel=2, ) @@ -205,7 +194,7 @@ def __getitem__(self, key: AppKey[_T]) -> _T: ... @overload def __getitem__(self, key: str) -> Any: ... - def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any: + def __getitem__(self, key: str | AppKey[_T]) -> Any: return self._state[key] def _check_frozen(self) -> None: @@ -222,7 +211,7 @@ def __setitem__(self, key: AppKey[_T], value: _T) -> None: ... @overload def __setitem__(self, key: str, value: Any) -> None: ... - def __setitem__(self, key: Union[str, AppKey[_T]], value: Any) -> None: + def __setitem__(self, key: str | AppKey[_T], value: Any) -> None: self._check_frozen() if not isinstance(key, AppKey): warnings.warn( @@ -234,29 +223,29 @@ def __setitem__(self, key: Union[str, AppKey[_T]], value: Any) -> None: ) self._state[key] = value - def __delitem__(self, key: Union[str, AppKey[_T]]) -> None: + def __delitem__(self, key: str | AppKey[_T]) -> None: self._check_frozen() del self._state[key] def __len__(self) -> int: return len(self._state) - def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]: + def __iter__(self) -> Iterator[str | AppKey[Any]]: return iter(self._state) def __hash__(self) -> int: return id(self) @overload # type: ignore[override] - def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]: ... + def get(self, key: AppKey[_T], default: None = ...) -> _T | None: ... 
@overload - def get(self, key: AppKey[_T], default: _U) -> Union[_T, _U]: ... + def get(self, key: AppKey[_T], default: _U) -> _T | _U: ... @overload def get(self, key: str, default: Any = ...) -> Any: ... - def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any: + def get(self, key: str | AppKey[_T], default: Any = None) -> Any: return self._state.get(key, default) ######## @@ -268,7 +257,7 @@ def loop(self) -> asyncio.AbstractEventLoop: warnings.warn("loop property is deprecated", DeprecationWarning, stacklevel=2) return cast(asyncio.AbstractEventLoop, self._loop) - def _set_loop(self, loop: Optional[asyncio.AbstractEventLoop]) -> None: + def _set_loop(self, loop: asyncio.AbstractEventLoop | None) -> None: if loop is None: loop = asyncio.get_event_loop() if self._loop is not None and self._loop is not loop: @@ -385,7 +374,7 @@ def add_domain(self, domain: str, subapp: "Application") -> MatchedSubAppResourc factory = partial(MatchedSubAppResource, rule, subapp) return self._add_subapp(factory, subapp) - def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]: + def add_routes(self, routes: Iterable[AbstractRouteDef]) -> list[AbstractRoute]: return self.router.add_routes(routes) @property @@ -419,15 +408,15 @@ def middlewares(self) -> _Middlewares: def _make_handler( self, *, - loop: Optional[asyncio.AbstractEventLoop] = None, - access_log_class: Type[AbstractAccessLogger] = AccessLogger, + loop: asyncio.AbstractEventLoop | None = None, + access_log_class: type[AbstractAccessLogger] = AccessLogger, **kwargs: Any, ) -> Server: if not issubclass(access_log_class, AbstractAccessLogger): raise TypeError( "access_log_class must be subclass of " - "aiohttp.abc.AbstractAccessLogger, got {}".format(access_log_class) + f"aiohttp.abc.AbstractAccessLogger, got {access_log_class}" ) self._set_loop(loop) @@ -449,8 +438,8 @@ def _make_handler( def make_handler( self, *, - loop: Optional[asyncio.AbstractEventLoop] = None, - access_log_class: Type[AbstractAccessLogger] = AccessLogger, + loop: asyncio.AbstractEventLoop | None = None, + access_log_class: type[AbstractAccessLogger] = AccessLogger, **kwargs: Any, ) -> Server: @@ -496,7 +485,7 @@ def _make_request( protocol: RequestHandler, writer: AbstractStreamWriter, task: "asyncio.Task[None]", - _cls: Type[Request] = Request, + _cls: type[Request] = Request, ) -> Request: if TYPE_CHECKING: assert self._loop is not None @@ -510,7 +499,7 @@ def _make_request( client_max_size=self._client_max_size, ) - def _prepare_middleware(self) -> Iterator[Tuple[Middleware, bool]]: + def _prepare_middleware(self) -> Iterator[tuple[Middleware, bool]]: for m in reversed(self._middlewares): if getattr(m, "__middleware_version__", None) == 1: yield m, True @@ -532,7 +521,7 @@ async def _handle(self, request: Request) -> StreamResponse: if not isinstance(match_info, AbstractMatchInfo): raise TypeError( "match_info should be AbstractMatchInfo " - "instance, not {!r}".format(match_info) + f"instance, not {match_info!r}" ) match_info.add_app(self) @@ -581,8 +570,8 @@ def __bool__(self) -> bool: class CleanupError(RuntimeError): @property - def exceptions(self) -> List[BaseException]: - return cast(List[BaseException], self.args[1]) + def exceptions(self) -> list[BaseException]: + return cast(list[BaseException], self.args[1]) if TYPE_CHECKING: @@ -594,7 +583,7 @@ def exceptions(self) -> List[BaseException]: class CleanupContext(_CleanupContextBase): def __init__(self) -> None: super().__init__() - self._exits: List[AsyncIterator[None]] = [] 
+ self._exits: list[AsyncIterator[None]] = [] async def _on_startup(self, app: Application) -> None: for cb in self: diff --git a/aiohttp/web_exceptions.py b/aiohttp/web_exceptions.py index ee2c1e72d40..8b914257a26 100644 --- a/aiohttp/web_exceptions.py +++ b/aiohttp/web_exceptions.py @@ -90,11 +90,11 @@ class HTTPException(Response, Exception): def __init__( self, *, - headers: Optional[LooseHeaders] = None, - reason: Optional[str] = None, + headers: LooseHeaders | None = None, + reason: str | None = None, body: Any = None, - text: Optional[str] = None, - content_type: Optional[str] = None, + text: str | None = None, + content_type: str | None = None, ) -> None: if body is not None: warnings.warn( @@ -170,11 +170,11 @@ def __init__( self, location: StrOrURL, *, - headers: Optional[LooseHeaders] = None, - reason: Optional[str] = None, + headers: LooseHeaders | None = None, + reason: str | None = None, body: Any = None, - text: Optional[str] = None, - content_type: Optional[str] = None, + text: str | None = None, + content_type: str | None = None, ) -> None: if not location: raise ValueError("HTTP redirects need a location to redirect to.") @@ -263,11 +263,11 @@ def __init__( method: str, allowed_methods: Iterable[str], *, - headers: Optional[LooseHeaders] = None, - reason: Optional[str] = None, + headers: LooseHeaders | None = None, + reason: str | None = None, body: Any = None, - text: Optional[str] = None, - content_type: Optional[str] = None, + text: str | None = None, + content_type: str | None = None, ) -> None: allow = ",".join(sorted(allowed_methods)) super().__init__( @@ -278,7 +278,7 @@ def __init__( content_type=content_type, ) self.headers["Allow"] = allow - self.allowed_methods: Set[str] = set(allowed_methods) + self.allowed_methods: set[str] = set(allowed_methods) self.method = method.upper() @@ -316,8 +316,8 @@ class HTTPRequestEntityTooLarge(HTTPClientError): def __init__(self, max_size: float, actual_size: float, **kwargs: Any) -> None: kwargs.setdefault( "text", - "Maximum request body size {} exceeded, " - "actual body size {}".format(max_size, actual_size), + f"Maximum request body size {max_size} exceeded, " + f"actual body size {actual_size}", ) super().__init__(**kwargs) @@ -371,13 +371,13 @@ class HTTPUnavailableForLegalReasons(HTTPClientError): def __init__( self, - link: Optional[StrOrURL], + link: StrOrURL | None, *, - headers: Optional[LooseHeaders] = None, - reason: Optional[str] = None, + headers: LooseHeaders | None = None, + reason: str | None = None, body: Any = None, - text: Optional[str] = None, - content_type: Optional[str] = None, + text: str | None = None, + content_type: str | None = None, ) -> None: super().__init__( headers=headers, @@ -392,7 +392,7 @@ def __init__( self.headers["Link"] = f'<{str(self._link)}>; rel="blocked-by"' @property - def link(self) -> Optional[URL]: + def link(self) -> URL | None: return self._link diff --git a/aiohttp/web_fileresponse.py b/aiohttp/web_fileresponse.py index 26484b9483a..15672f39df7 100644 --- a/aiohttp/web_fileresponse.py +++ b/aiohttp/web_fileresponse.py @@ -86,7 +86,7 @@ class _FileResponseResult(Enum): CONTENT_TYPES.add_type(content_type, extension) -_CLOSE_FUTURES: Set[asyncio.Future[None]] = set() +_CLOSE_FUTURES: set[asyncio.Future[None]] = set() class FileResponse(StreamResponse): @@ -97,8 +97,8 @@ def __init__( path: PathLike, chunk_size: int = 256 * 1024, status: int = 200, - reason: Optional[str] = None, - headers: Optional[LooseHeaders] = None, + reason: str | None = None, + headers: LooseHeaders | 
None = None, ) -> None: super().__init__(status=status, reason=reason, headers=headers) @@ -152,7 +152,7 @@ async def _sendfile( return writer @staticmethod - def _etag_match(etag_value: str, etags: Tuple[ETag, ...], *, weak: bool) -> bool: + def _etag_match(etag_value: str, etags: tuple[ETag, ...], *, weak: bool) -> bool: if len(etags) == 1 and etags[0].value == ETAG_ANY: return True return any( @@ -161,7 +161,7 @@ def _etag_match(etag_value: str, etags: Tuple[ETag, ...], *, weak: bool) -> bool async def _not_modified( self, request: "BaseRequest", etag_value: str, last_modified: float - ) -> Optional[AbstractStreamWriter]: + ) -> AbstractStreamWriter | None: self.set_status(HTTPNotModified.status_code) self._length_check = False self.etag = etag_value @@ -172,15 +172,15 @@ async def _not_modified( async def _precondition_failed( self, request: "BaseRequest" - ) -> Optional[AbstractStreamWriter]: + ) -> AbstractStreamWriter | None: self.set_status(HTTPPreconditionFailed.status_code) self.content_length = 0 return await super().prepare(request) def _make_response( self, request: "BaseRequest", accept_encoding: str - ) -> Tuple[ - _FileResponseResult, Optional[io.BufferedReader], os.stat_result, Optional[str] + ) -> tuple[ + _FileResponseResult, io.BufferedReader | None, os.stat_result, str | None ]: """Return the response result, io object, stat result, and encoding. @@ -235,7 +235,7 @@ def _make_response( def _get_file_path_stat_encoding( self, accept_encoding: str - ) -> Tuple[Optional[pathlib.Path], os.stat_result, Optional[str]]: + ) -> tuple[pathlib.Path | None, os.stat_result, str | None]: file_path = self._path for file_extension, file_encoding in ENCODING_EXTENSIONS.items(): if file_encoding not in accept_encoding: @@ -252,7 +252,7 @@ def _get_file_path_stat_encoding( st = file_path.stat() return file_path if S_ISREG(st.st_mode) else None, st, None - async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: + async def prepare(self, request: "BaseRequest") -> AbstractStreamWriter | None: loop = asyncio.get_running_loop() # Encoding comparisons should be case-insensitive # https://www.rfc-editor.org/rfc/rfc9110#section-8.4.1 @@ -302,13 +302,13 @@ async def _prepare_open_file( request: "BaseRequest", fobj: io.BufferedReader, st: os.stat_result, - file_encoding: Optional[str], - ) -> Optional[AbstractStreamWriter]: + file_encoding: str | None, + ) -> AbstractStreamWriter | None: status = self._status file_size: int = st.st_size file_mtime: float = st.st_mtime count: int = file_size - start: Optional[int] = None + start: int | None = None if (ifrange := request.if_range) is None or file_mtime <= ifrange.timestamp(): # If-Range header check: @@ -321,7 +321,7 @@ async def _prepare_open_file( try: rng = request.http_range start = rng.start - end: Optional[int] = rng.stop + end: int | None = rng.stop except ValueError: # https://tools.ietf.org/html/rfc7233: # A server generating a 416 (Range Not Satisfiable) response to @@ -404,8 +404,8 @@ async def _prepare_open_file( if status == HTTPPartialContent.status_code: real_start = start assert real_start is not None - self._headers[hdrs.CONTENT_RANGE] = "bytes {}-{}/{}".format( - real_start, real_start + count - 1, file_size + self._headers[hdrs.CONTENT_RANGE] = ( + f"bytes {real_start}-{real_start + count - 1}/{file_size}" ) # If we are sending 0 bytes calling sendfile() will throw a ValueError diff --git a/aiohttp/web_log.py b/aiohttp/web_log.py index d5ea2beeb15..effdf53c3ce 100644 --- a/aiohttp/web_log.py +++ 
b/aiohttp/web_log.py @@ -58,7 +58,7 @@ class AccessLogger(AbstractAccessLogger): LOG_FORMAT = '%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"' FORMAT_RE = re.compile(r"%(\{([A-Za-z0-9\-_]+)\}([ioe])|[atPrsbOD]|Tf?)") CLEANUP_RE = re.compile(r"(%[^s])") - _FORMAT_CACHE: Dict[str, Tuple[str, List[KeyMethod]]] = {} + _FORMAT_CACHE: dict[str, tuple[str, list[KeyMethod]]] = {} def __init__(self, logger: logging.Logger, log_format: str = LOG_FORMAT) -> None: """Initialise the logger. @@ -76,7 +76,7 @@ def __init__(self, logger: logging.Logger, log_format: str = LOG_FORMAT) -> None self._log_format, self._methods = _compiled_format - def compile_format(self, log_format: str) -> Tuple[str, List[KeyMethod]]: + def compile_format(self, log_format: str) -> tuple[str, list[KeyMethod]]: """Translate log_format into form usable by modulo formatting All known atoms will be replaced with %s @@ -156,12 +156,7 @@ def _format_P(request: BaseRequest, response: StreamResponse, time: float) -> st def _format_r(request: BaseRequest, response: StreamResponse, time: float) -> str: if request is None: return "-" - return "{} {} HTTP/{}.{}".format( - request.method, - request.path_qs, - request.version.major, - request.version.minor, - ) + return f"{request.method} {request.path_qs} HTTP/{request.version.major}.{request.version.minor}" @staticmethod def _format_s(request: BaseRequest, response: StreamResponse, time: float) -> int: @@ -185,7 +180,7 @@ def _format_D(request: BaseRequest, response: StreamResponse, time: float) -> st def _format_line( self, request: BaseRequest, response: StreamResponse, time: float - ) -> Iterable[Tuple[str, Callable[[BaseRequest, StreamResponse, float], str]]]: + ) -> Iterable[tuple[str, Callable[[BaseRequest, StreamResponse, float], str]]]: return [(key, method(request, response, time)) for key, method in self._methods] @property diff --git a/aiohttp/web_middlewares.py b/aiohttp/web_middlewares.py index 2f1f5f58e6e..9816a382fdc 100644 --- a/aiohttp/web_middlewares.py +++ b/aiohttp/web_middlewares.py @@ -1,5 +1,5 @@ import re -from typing import TYPE_CHECKING, Tuple, Type, TypeVar +from typing import TYPE_CHECKING, TypeVar from .typedefs import Handler, Middleware from .web_exceptions import HTTPMove, HTTPPermanentRedirect @@ -18,7 +18,7 @@ _Func = TypeVar("_Func") -async def _check_request_resolves(request: Request, path: str) -> Tuple[bool, Request]: +async def _check_request_resolves(request: Request, path: str) -> tuple[bool, Request]: alt_request = request.clone(rel_url=path) match_info = await request.app.router.resolve(alt_request) @@ -40,7 +40,7 @@ def normalize_path_middleware( append_slash: bool = True, remove_slash: bool = False, merge_slashes: bool = True, - redirect_class: Type[HTTPMove] = HTTPPermanentRedirect, + redirect_class: type[HTTPMove] = HTTPPermanentRedirect, ) -> Middleware: """Factory for producing a middleware that normalizes the path of a request. 
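Aside, not part of the patch: `normalize_path_middleware` is the public factory whose `redirect_class` parameter the hunk above re-annotates as `type[HTTPMove]`. A hedged usage sketch; the handler and route here are invented:

```python
from aiohttp import web

async def hello(request: web.Request) -> web.Response:
    return web.Response(text="hello")

app = web.Application(
    middlewares=[
        # merge_slashes collapses runs of // in the path; append_slash
        # redirects /hello -> /hello/ when only the slashed route
        # resolves, using redirect_class (HTTPPermanentRedirect by default).
        web.normalize_path_middleware(append_slash=True, merge_slashes=True)
    ]
)
app.router.add_get("/hello/", hello)
```

Internally the factory leans on the `_check_request_resolves` helper from the same hunk to probe whether a rewritten path actually matches a route.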
diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py index e1923aac24b..44f5055507b 100644 --- a/aiohttp/web_protocol.py +++ b/aiohttp/web_protocol.py @@ -4,23 +4,12 @@ import traceback import warnings from collections import deque +from collections.abc import Awaitable, Callable, Sequence from contextlib import suppress from html import escape as html_escape from http import HTTPStatus from logging import Logger -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - Deque, - Optional, - Sequence, - Tuple, - Type, - Union, - cast, -) +from typing import TYPE_CHECKING, Any, Optional, cast import attr import yarl @@ -98,7 +87,7 @@ class _ErrInfo: message: str -_MsgType = Tuple[Union[RawRequestMessage, _ErrInfo], StreamReader] +_MsgType = tuple[RawRequestMessage | _ErrInfo, StreamReader] class RequestHandler(BaseProtocol): @@ -183,7 +172,7 @@ def __init__( keepalive_timeout: float = 3630, tcp_keepalive: bool = True, logger: Logger = server_logger, - access_log_class: Type[AbstractAccessLogger] = AccessLogger, + access_log_class: type[AbstractAccessLogger] = AccessLogger, access_log: Logger = access_logger, access_log_format: str = AccessLogger.LOG_FORMAT, debug: bool = False, @@ -200,28 +189,28 @@ def __init__( # _request_count is the number of requests processed with the same connection. self._request_count = 0 self._keepalive = False - self._current_request: Optional[BaseRequest] = None - self._manager: Optional[Server] = manager - self._request_handler: Optional[_RequestHandler] = manager.request_handler - self._request_factory: Optional[_RequestFactory] = manager.request_factory + self._current_request: BaseRequest | None = None + self._manager: Server | None = manager + self._request_handler: _RequestHandler | None = manager.request_handler + self._request_factory: _RequestFactory | None = manager.request_factory self._tcp_keepalive = tcp_keepalive # placeholder to be replaced on keepalive timeout setup self._next_keepalive_close_time = 0.0 - self._keepalive_handle: Optional[asyncio.Handle] = None + self._keepalive_handle: asyncio.Handle | None = None self._keepalive_timeout = keepalive_timeout self._lingering_time = float(lingering_time) - self._messages: Deque[_MsgType] = deque() + self._messages: deque[_MsgType] = deque() self._message_tail = b"" - self._waiter: Optional[asyncio.Future[None]] = None - self._handler_waiter: Optional[asyncio.Future[None]] = None - self._task_handler: Optional[asyncio.Task[None]] = None + self._waiter: asyncio.Future[None] | None = None + self._handler_waiter: asyncio.Future[None] | None = None + self._task_handler: asyncio.Task[None] | None = None self._upgrade = False self._payload_parser: Any = None - self._request_parser: Optional[HttpRequestParser] = HttpRequestParser( + self._request_parser: HttpRequestParser | None = HttpRequestParser( self, loop, read_bufsize, @@ -242,7 +231,7 @@ def __init__( self.debug = debug self.access_log = access_log if access_log: - self.access_logger: Optional[AbstractAccessLogger] = access_log_class( + self.access_logger: AbstractAccessLogger | None = access_log_class( access_log, access_log_format ) self._logging_enabled = self.access_logger.enabled @@ -273,7 +262,7 @@ def ssl_context(self) -> Optional["ssl.SSLContext"]: @under_cached_property def peername( self, - ) -> Optional[Union[str, Tuple[str, int, int, int], Tuple[str, int]]]: + ) -> str | tuple[str, int, int, int] | tuple[str, int] | None: """Return peername if available.""" return ( None @@ -285,7 +274,7 @@ def peername( def 
keepalive_timeout(self) -> float: return self._keepalive_timeout - async def shutdown(self, timeout: Optional[float] = 15.0) -> None: + async def shutdown(self, timeout: float | None = 15.0) -> None: """Do worker process exit preparations. We need to clean up everything and stop accepting requests. @@ -352,7 +341,7 @@ def connection_made(self, transport: asyncio.BaseTransport) -> None: task = loop.create_task(self.start()) self._task_handler = task - def connection_lost(self, exc: Optional[BaseException]) -> None: + def connection_lost(self, exc: BaseException | None) -> None: if self._manager is None: return self._manager.connection_lost(self, exc) @@ -466,7 +455,7 @@ def force_close(self) -> None: self.transport = None def log_access( - self, request: BaseRequest, response: StreamResponse, time: Optional[float] + self, request: BaseRequest, response: StreamResponse, time: float | None ) -> None: if self.access_logger is not None and self.access_logger.enabled: if TYPE_CHECKING: @@ -500,9 +489,9 @@ def _process_keepalive(self) -> None: async def _handle_request( self, request: BaseRequest, - start_time: Optional[float], + start_time: float | None, request_handler: Callable[[BaseRequest], Awaitable[StreamResponse]], - ) -> Tuple[StreamResponse, bool]: + ) -> tuple[StreamResponse, bool]: self._request_in_progress = True try: try: @@ -678,8 +667,8 @@ async def start(self) -> None: self.transport.close() async def finish_response( - self, request: BaseRequest, resp: StreamResponse, start_time: Optional[float] - ) -> Tuple[StreamResponse, bool]: + self, request: BaseRequest, resp: StreamResponse, start_time: float | None + ) -> tuple[StreamResponse, bool]: """Prepare the response and write_eof, then log access. This has to @@ -701,8 +690,7 @@ async def finish_response( self.log_exception("Missing return statement on request handler") else: self.log_exception( - "Web-handler should return a response instance, " - "got {!r}".format(resp) + "Web-handler should return a response instance, " f"got {resp!r}" ) exc = HTTPInternalServerError() resp = Response( @@ -723,8 +711,8 @@ def handle_error( self, request: BaseRequest, status: int = 500, - exc: Optional[BaseException] = None, - message: Optional[str] = None, + exc: BaseException | None = None, + message: str | None = None, ) -> StreamResponse: """Handle errors. @@ -753,7 +741,7 @@ def handle_error( ct = "text/plain" if status == HTTPStatus.INTERNAL_SERVER_ERROR: - title = "{0.value} {0.phrase}".format(HTTPStatus.INTERNAL_SERVER_ERROR) + title = f"{HTTPStatus.INTERNAL_SERVER_ERROR.value} {HTTPStatus.INTERNAL_SERVER_ERROR.phrase}" msg = HTTPStatus.INTERNAL_SERVER_ERROR.description tb = None if self.debug: @@ -766,10 +754,10 @@ def handle_error( msg = f"
<h2>Traceback:</h2>\n<pre>{tb}</pre>" message = ( "<html><head>" - "<title>{title}</title>" - "</head><body>\n<h1>{title}</h1>" - "\n{msg}\n</body></html>\n" - ).format(title=title, msg=msg) + f"<title>{title}</title>" + f"</head><body>\n<h1>{title}</h1>" + f"\n{msg}\n</body></html>\n" + ) ct = "text/html" else: if tb: diff --git a/aiohttp/web_request.py b/aiohttp/web_request.py index 0bc69b74db9..644d55631fe 100644 --- a/aiohttp/web_request.py +++ b/aiohttp/web_request.py @@ -7,21 +7,10 @@ import tempfile import types import warnings +from collections.abc import Iterator, Mapping, MutableMapping +from re import Pattern from types import MappingProxyType -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Final, - Iterator, - Mapping, - MutableMapping, - Optional, - Pattern, - Tuple, - Union, - cast, -) +from typing import TYPE_CHECKING, Any, Final, Optional, cast from urllib.parse import parse_qsl import attr @@ -95,15 +84,9 @@ class FileField: _QUOTED_PAIR: Final[str] = r"\\[\t !-~]" -_QUOTED_STRING: Final[str] = r'"(?:{quoted_pair}|{qdtext})*"'.format( qdtext=_QDTEXT, quoted_pair=_QUOTED_PAIR ) +_QUOTED_STRING: Final[str] = rf'"(?:{_QUOTED_PAIR}|{_QDTEXT})*"' -_FORWARDED_PAIR: Final[str] = ( r"({token})=({token}|{quoted_string})(:\d{{1,4}})?".format( token=_TOKEN, quoted_string=_QUOTED_STRING ) -) +_FORWARDED_PAIR: Final[str] = rf"({_TOKEN})=({_TOKEN}|{_QUOTED_STRING})(:\d{{1,4}})?" _QUOTED_PAIR_REPLACE_RE: Final[Pattern[str]] = re.compile(r"\\([\t !-~])") # same pattern as _QUOTED_PAIR but contains a capture group @@ -146,8 +129,8 @@ class BaseRequest(MutableMapping[str, Any], HeadersMixin): "_transport_peername", ] ) - _post: Optional[MultiDictProxy[Union[str, bytes, FileField]]] = None - _read_bytes: Optional[bytes] = None + _post: MultiDictProxy[str | bytes | FileField] | None = None + _read_bytes: bytes | None = None def __init__( self, @@ -159,10 +142,10 @@ def __init__( loop: asyncio.AbstractEventLoop, *, client_max_size: int = 1024**2, - state: Optional[Dict[str, Any]] = None, - scheme: Optional[str] = None, - host: Optional[str] = None, - remote: Optional[str] = None, + state: dict[str, Any] | None = None, + scheme: str | None = None, + host: str | None = None, + remote: str | None = None, ) -> None: self._message = message self._protocol = protocol @@ -172,7 +155,7 @@ def __init__( self._headers: CIMultiDictProxy[str] = message.headers self._method = message.method self._version = message.version - self._cache: Dict[str, Any] = {} + self._cache: dict[str, Any] = {} url = message.url if url.absolute: if scheme is not None: @@ -207,13 +190,13 @@ def __init__( def clone( self, *, - method: Union[str, _SENTINEL] = sentinel, - rel_url: Union[StrOrURL, _SENTINEL] = sentinel, - headers: Union[LooseHeaders, _SENTINEL] = sentinel, - scheme: Union[str, _SENTINEL] = sentinel, - host: Union[str, _SENTINEL] = sentinel, - remote: Union[str, _SENTINEL] = sentinel, - client_max_size: Union[int, _SENTINEL] = sentinel, + method: str | _SENTINEL = sentinel, + rel_url: StrOrURL | _SENTINEL = sentinel, + headers: LooseHeaders | _SENTINEL = sentinel, + scheme: str | _SENTINEL = sentinel, + host: str | _SENTINEL = sentinel, + remote: str | _SENTINEL = sentinel, + client_max_size: int | _SENTINEL = sentinel, ) -> "BaseRequest": """Clone itself with replacement some attributes. 
@@ -224,7 +207,7 @@ def clone( if self._read_bytes: raise RuntimeError("Cannot clone request after reading its content") - dct: Dict[str, Any] = {} + dct: dict[str, Any] = {} if method is not sentinel: dct["method"] = method if rel_url is not sentinel: @@ -272,7 +255,7 @@ def protocol(self) -> "RequestHandler": return self._protocol @property - def transport(self) -> Optional[asyncio.Transport]: + def transport(self) -> asyncio.Transport | None: if self._protocol is None: return None return self._protocol.transport @@ -326,7 +309,7 @@ def secure(self) -> bool: return self.scheme == "https" @reify - def forwarded(self) -> Tuple[Mapping[str, str], ...]: + def forwarded(self) -> tuple[Mapping[str, str], ...]: """A tuple containing all parsed Forwarded header(s). Makes an effort to parse Forwarded headers as specified by RFC 7239: @@ -350,7 +333,7 @@ def forwarded(self) -> Tuple[Mapping[str, str], ...]: length = len(field_value) pos = 0 need_separator = False - elem: Dict[str, str] = {} + elem: dict[str, str] = {} elems.append(types.MappingProxyType(elem)) while 0 <= pos < length: match = _FORWARDED_PAIR_RE.match(field_value, pos) @@ -438,7 +421,7 @@ def host(self) -> str: return socket.getfqdn() @reify - def remote(self) -> Optional[str]: + def remote(self) -> str | None: """Remote IP of client initiated HTTP request. The IP is resolved in this order: @@ -509,7 +492,7 @@ def raw_headers(self) -> RawHeaders: return self._message.raw_headers @reify - def if_modified_since(self) -> Optional[datetime.datetime]: + def if_modified_since(self) -> datetime.datetime | None: """The value of If-Modified-Since HTTP header, or None. This header is represented as a `datetime` object. @@ -517,7 +500,7 @@ def if_modified_since(self) -> Optional[datetime.datetime]: return parse_http_date(self.headers.get(hdrs.IF_MODIFIED_SINCE)) @reify - def if_unmodified_since(self) -> Optional[datetime.datetime]: + def if_unmodified_since(self) -> datetime.datetime | None: """The value of If-Unmodified-Since HTTP header, or None. This header is represented as a `datetime` object. @@ -547,15 +530,15 @@ def _etag_values(etag_header: str) -> Iterator[ETag]: @classmethod def _if_match_or_none_impl( - cls, header_value: Optional[str] - ) -> Optional[Tuple[ETag, ...]]: + cls, header_value: str | None + ) -> tuple[ETag, ...] | None: if not header_value: return None return tuple(cls._etag_values(header_value)) @reify - def if_match(self) -> Optional[Tuple[ETag, ...]]: + def if_match(self) -> tuple[ETag, ...] | None: """The value of If-Match HTTP header, or None. This header is represented as a `tuple` of `ETag` objects. @@ -563,7 +546,7 @@ def if_match(self) -> Optional[Tuple[ETag, ...]]: return self._if_match_or_none_impl(self.headers.get(hdrs.IF_MATCH)) @reify - def if_none_match(self) -> Optional[Tuple[ETag, ...]]: + def if_none_match(self) -> tuple[ETag, ...] | None: """The value of If-None-Match HTTP header, or None. This header is represented as a `tuple` of `ETag` objects. @@ -571,7 +554,7 @@ def if_none_match(self) -> Optional[Tuple[ETag, ...]]: return self._if_match_or_none_impl(self.headers.get(hdrs.IF_NONE_MATCH)) @reify - def if_range(self) -> Optional[datetime.datetime]: + def if_range(self) -> datetime.datetime | None: """The value of If-Range HTTP header, or None. This header is represented as a `datetime` object. 
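Aside: the reified properties above expose the conditional-request headers as real `datetime` and `ETag` values, which keeps hand-rolled conditional GET short. A sketch; the handler name and timestamp are invented, and the `last_modified` setter it uses appears in the web_response.py diff further down:

```python
import datetime

from aiohttp import web

LAST_MODIFIED = datetime.datetime(2025, 1, 1, tzinfo=datetime.timezone.utc)

async def conditional_get(request: web.Request) -> web.Response:
    ims = request.if_modified_since  # datetime.datetime | None after this patch
    if ims is not None and ims >= LAST_MODIFIED:
        return web.Response(status=304)
    resp = web.Response(text="fresh body")
    resp.last_modified = LAST_MODIFIED
    return resp
```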
@@ -698,7 +681,7 @@ async def multipart(self) -> MultipartReader: """Return async iterator to process BODY as multipart.""" return MultipartReader(self._headers, self._payload) - async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]": + async def post(self) -> "MultiDictProxy[str | bytes | FileField]": """Return POST parameters.""" if self._post is not None: return self._post @@ -715,7 +698,7 @@ async def post(self) -> "MultiDictProxy[Union[str, bytes, FileField]]": self._post = MultiDictProxy(MultiDict()) return self._post - out: MultiDict[Union[str, bytes, FileField]] = MultiDict() + out: MultiDict[str | bytes | FileField] = MultiDict() if content_type == "multipart/form-data": multipart = await self.multipart() @@ -812,9 +795,7 @@ def __repr__(self) -> str: ascii_encodable_path = self.path.encode("ascii", "backslashreplace").decode( "ascii" ) - return "<{} {} {} >".format( - self.__class__.__name__, self._method, ascii_encodable_path - ) + return f"<{self.__class__.__name__} {self._method} {ascii_encodable_path} >" def __eq__(self, other: object) -> bool: return id(self) == id(other) @@ -852,8 +833,8 @@ class Request(BaseRequest): def __setattr__(self, name: str, val: Any) -> None: if name not in self.ATTRS: warnings.warn( - "Setting custom {}.{} attribute " - "is discouraged".format(self.__class__.__name__, name), + f"Setting custom {self.__class__.__name__}.{name} attribute " + "is discouraged", DeprecationWarning, stacklevel=2, ) @@ -862,13 +843,13 @@ def __setattr__(self, name: str, val: Any) -> None: def clone( self, *, - method: Union[str, _SENTINEL] = sentinel, - rel_url: Union[StrOrURL, _SENTINEL] = sentinel, - headers: Union[LooseHeaders, _SENTINEL] = sentinel, - scheme: Union[str, _SENTINEL] = sentinel, - host: Union[str, _SENTINEL] = sentinel, - remote: Union[str, _SENTINEL] = sentinel, - client_max_size: Union[int, _SENTINEL] = sentinel, + method: str | _SENTINEL = sentinel, + rel_url: StrOrURL | _SENTINEL = sentinel, + headers: LooseHeaders | _SENTINEL = sentinel, + scheme: str | _SENTINEL = sentinel, + host: str | _SENTINEL = sentinel, + remote: str | _SENTINEL = sentinel, + client_max_size: int | _SENTINEL = sentinel, ) -> "Request": ret = super().clone( method=method, diff --git a/aiohttp/web_response.py b/aiohttp/web_response.py index e5f8b6cd652..03404029cf3 100644 --- a/aiohttp/web_response.py +++ b/aiohttp/web_response.py @@ -6,19 +6,11 @@ import math import time import warnings +from collections.abc import Iterator, MutableMapping from concurrent.futures import Executor from http import HTTPStatus from http.cookies import SimpleCookie -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Iterator, - MutableMapping, - Optional, - Union, - cast, -) +from typing import TYPE_CHECKING, Any, Optional, cast from multidict import CIMultiDict, istr @@ -75,29 +67,29 @@ class ContentCoding(enum.Enum): class StreamResponse(BaseClass, HeadersMixin): - _body: Union[None, bytes, bytearray, Payload] + _body: None | bytes | bytearray | Payload _length_check = True _body = None - _keep_alive: Optional[bool] = None + _keep_alive: bool | None = None _chunked: bool = False _compression: bool = False - _compression_strategy: Optional[int] = None - _compression_force: Optional[ContentCoding] = None + _compression_strategy: int | None = None + _compression_force: ContentCoding | None = None _req: Optional["BaseRequest"] = None - _payload_writer: Optional[AbstractStreamWriter] = None + _payload_writer: AbstractStreamWriter | None = None _eof_sent: bool = False - 
_must_be_empty_body: Optional[bool] = None + _must_be_empty_body: bool | None = None _body_length = 0 - _cookies: Optional[SimpleCookie] = None + _cookies: SimpleCookie | None = None _send_headers_immediately = True def __init__( self, *, status: int = 200, - reason: Optional[str] = None, - headers: Optional[LooseHeaders] = None, - _real_headers: Optional[CIMultiDict[str]] = None, + reason: str | None = None, + headers: LooseHeaders | None = None, + _real_headers: CIMultiDict[str] | None = None, ) -> None: """Initialize a new stream response object. @@ -106,7 +98,7 @@ def __init__( the headers when creating a new response object. It is not intended to be used by external code. """ - self._state: Dict[str, Any] = {} + self._state: dict[str, Any] = {} if _real_headers is not None: self._headers = _real_headers @@ -122,7 +114,7 @@ def prepared(self) -> bool: return self._eof_sent or self._payload_writer is not None @property - def task(self) -> "Optional[asyncio.Task[None]]": + def task(self) -> "asyncio.Task[None] | None": if self._req: return self._req.task else: @@ -147,14 +139,14 @@ def reason(self) -> str: def set_status( self, status: int, - reason: Optional[str] = None, + reason: str | None = None, ) -> None: assert ( not self.prepared ), "Cannot change the response status code after the headers have been sent" self._set_status(status, reason) - def _set_status(self, status: int, reason: Optional[str]) -> None: + def _set_status(self, status: int, reason: str | None) -> None: self._status = int(status) if reason is None: reason = REASON_PHRASES.get(self._status, "") @@ -163,7 +155,7 @@ def _set_status(self, status: int, reason: Optional[str]) -> None: self._reason = reason @property - def keep_alive(self) -> Optional[bool]: + def keep_alive(self) -> bool | None: return self._keep_alive def force_close(self) -> None: @@ -179,7 +171,7 @@ def output_length(self) -> int: assert self._payload_writer return self._payload_writer.buffer_size - def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None: + def enable_chunked_encoding(self, chunk_size: int | None = None) -> None: """Enables automatic chunked transfer encoding.""" if hdrs.CONTENT_LENGTH in self._headers: raise RuntimeError( @@ -191,8 +183,8 @@ def enable_chunked_encoding(self, chunk_size: Optional[int] = None) -> None: def enable_compression( self, - force: Optional[Union[bool, ContentCoding]] = None, - strategy: Optional[int] = None, + force: bool | ContentCoding | None = None, + strategy: int | None = None, ) -> None: """Enables response compression encoding.""" # Backwards compatibility for when force was a bool <0.17. @@ -225,15 +217,15 @@ def set_cookie( name: str, value: str, *, - expires: Optional[str] = None, - domain: Optional[str] = None, - max_age: Optional[Union[int, str]] = None, + expires: str | None = None, + domain: str | None = None, + max_age: int | str | None = None, path: str = "/", - secure: Optional[bool] = None, - httponly: Optional[bool] = None, - version: Optional[str] = None, - samesite: Optional[str] = None, - partitioned: Optional[bool] = None, + secure: bool | None = None, + httponly: bool | None = None, + version: str | None = None, + samesite: str | None = None, + partitioned: bool | None = None, ) -> None: """Set or update response cookie. 
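Aside: `set_cookie`'s keyword-only parameters map one-to-one onto `Set-Cookie` attributes, so this hunk is purely an annotation re-spelling. A usage sketch with invented values; `del_cookie`, used at the end, is covered by the very next hunk:

```python
from aiohttp import web

async def login(request: web.Request) -> web.Response:
    resp = web.Response(text="ok")
    resp.set_cookie(
        "session",
        "opaque-token",   # invented value
        max_age=3600,     # int | str per the new annotation
        path="/",
        secure=True,
        httponly=True,
        samesite="Lax",
    )
    resp.del_cookie("legacy_session")
    return resp
```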
@@ -277,11 +269,11 @@ def del_cookie( self, name: str, *, - domain: Optional[str] = None, + domain: str | None = None, path: str = "/", - secure: Optional[bool] = None, - httponly: Optional[bool] = None, - samesite: Optional[str] = None, + secure: bool | None = None, + httponly: bool | None = None, + samesite: str | None = None, ) -> None: """Delete cookie. @@ -303,12 +295,12 @@ def del_cookie( ) @property - def content_length(self) -> Optional[int]: + def content_length(self) -> int | None: # Just a placeholder for adding setter return super().content_length @content_length.setter - def content_length(self, value: Optional[int]) -> None: + def content_length(self, value: int | None) -> None: if value is not None: value = int(value) if self._chunked: @@ -331,12 +323,12 @@ def content_type(self, value: str) -> None: self._generate_content_type_header() @property - def charset(self) -> Optional[str]: + def charset(self) -> str | None: # Just a placeholder for adding setter return super().charset @charset.setter - def charset(self, value: Optional[str]) -> None: + def charset(self, value: str | None) -> None: ctype = self.content_type # read header values if needed if ctype == "application/octet-stream": raise RuntimeError( @@ -351,7 +343,7 @@ def charset(self, value: Optional[str]) -> None: self._generate_content_type_header() @property - def last_modified(self) -> Optional[datetime.datetime]: + def last_modified(self) -> datetime.datetime | None: """The value of Last-Modified HTTP header, or None. This header is represented as a `datetime` object. @@ -360,7 +352,7 @@ def last_modified(self) -> Optional[datetime.datetime]: @last_modified.setter def last_modified( - self, value: Optional[Union[int, float, datetime.datetime, str]] + self, value: int | float | datetime.datetime | str | None ) -> None: if value is None: self._headers.pop(hdrs.LAST_MODIFIED, None) @@ -379,7 +371,7 @@ def last_modified( raise TypeError(msg) @property - def etag(self) -> Optional[ETag]: + def etag(self) -> ETag | None: quoted_value = self._headers.get(hdrs.ETAG) if not quoted_value: return None @@ -395,7 +387,7 @@ def etag(self) -> Optional[ETag]: ) @etag.setter - def etag(self, value: Optional[Union[ETag, str]]) -> None: + def etag(self, value: ETag | str | None) -> None: if value is None: self._headers.pop(hdrs.ETAG, None) elif (isinstance(value, str) and value == ETAG_ANY) or ( @@ -451,7 +443,7 @@ async def _start_compression(self, request: "BaseRequest") -> None: await self._do_start_compression(coding) return - async def prepare(self, request: "BaseRequest") -> Optional[AbstractStreamWriter]: + async def prepare(self, request: "BaseRequest") -> AbstractStreamWriter | None: if self._eof_sent: return None if self._payload_writer is not None: @@ -494,7 +486,7 @@ async def _prepare_headers(self) -> None: if version != HttpVersion11: raise RuntimeError( "Using chunked encoding is forbidden " - "for HTTP/{0.major}.{0.minor}".format(request.version) + f"for HTTP/{request.version.major}.{request.version.minor}" ) if not self._must_be_empty_body: writer.enable_chunking() @@ -547,7 +539,7 @@ async def _write_headers(self) -> None: if self._send_headers_immediately: writer.send_headers() - async def write(self, data: Union[bytes, bytearray, memoryview]) -> None: + async def write(self, data: bytes | bytearray | memoryview) -> None: assert isinstance( data, (bytes, bytearray, memoryview) ), "data argument must be byte-ish (%r)" % type(data) @@ -622,7 +614,7 @@ def __bool__(self) -> bool: class Response(StreamResponse): - 
_compressed_body: Optional[bytes] = None + _compressed_body: bytes | None = None _send_headers_immediately = False def __init__( @@ -630,13 +622,13 @@ def __init__( *, body: Any = None, status: int = 200, - reason: Optional[str] = None, - text: Optional[str] = None, - headers: Optional[LooseHeaders] = None, - content_type: Optional[str] = None, - charset: Optional[str] = None, - zlib_executor_size: Optional[int] = None, - zlib_executor: Optional[Executor] = None, + reason: str | None = None, + text: str | None = None, + headers: LooseHeaders | None = None, + content_type: str | None = None, + charset: str | None = None, + zlib_executor_size: int | None = None, + zlib_executor: Executor | None = None, ) -> None: if body is not None and text is not None: raise ValueError("body and text are not allowed together") @@ -691,7 +683,7 @@ def __init__( self._zlib_executor = zlib_executor @property - def body(self) -> Optional[Union[bytes, Payload]]: + def body(self) -> bytes | Payload | None: return self._body @body.setter @@ -721,7 +713,7 @@ def body(self, body: Any) -> None: self._compressed_body = None @property - def text(self) -> Optional[str]: + def text(self) -> str | None: if self._body is None: return None # Note: When _body is a Payload (e.g. FilePayload), this may do blocking I/O @@ -744,7 +736,7 @@ def text(self, text: str) -> None: self._compressed_body = None @property - def content_length(self) -> Optional[int]: + def content_length(self) -> int | None: if self._chunked: return None @@ -763,14 +755,14 @@ def content_length(self) -> Optional[int]: return 0 @content_length.setter - def content_length(self, value: Optional[int]) -> None: + def content_length(self, value: int | None) -> None: raise RuntimeError("Content length is set automatically") async def write_eof(self, data: bytes = b"") -> None: if self._eof_sent: return if self._compressed_body is None: - body: Optional[Union[bytes, Payload]] = self._body + body: bytes | Payload | None = self._body else: body = self._compressed_body assert not data, f"data arg is not supported, got {data!r}" @@ -833,11 +825,11 @@ async def _do_start_compression(self, coding: ContentCoding) -> None: def json_response( data: Any = sentinel, *, - text: Optional[str] = None, - body: Optional[bytes] = None, + text: str | None = None, + body: bytes | None = None, status: int = 200, - reason: Optional[str] = None, - headers: Optional[LooseHeaders] = None, + reason: str | None = None, + headers: LooseHeaders | None = None, content_type: str = "application/json", dumps: JSONEncoder = json.dumps, ) -> Response: diff --git a/aiohttp/web_routedef.py b/aiohttp/web_routedef.py index f51b6cd0081..4c4027218a8 100644 --- a/aiohttp/web_routedef.py +++ b/aiohttp/web_routedef.py @@ -1,18 +1,7 @@ import abc import os # noqa -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Iterator, - List, - Optional, - Sequence, - Type, - Union, - overload, -) +from collections.abc import Callable, Iterator, Sequence +from typing import TYPE_CHECKING, Any, Union, overload import attr @@ -48,11 +37,11 @@ class AbstractRouteDef(abc.ABC): @abc.abstractmethod - def register(self, router: UrlDispatcher) -> List[AbstractRoute]: + def register(self, router: UrlDispatcher) -> list[AbstractRoute]: pass # pragma: no cover -_HandlerType = Union[Type[AbstractView], Handler] +_HandlerType = Union[type[AbstractView], Handler] @attr.s(auto_attribs=True, frozen=True, repr=False, slots=True) @@ -60,7 +49,7 @@ class RouteDef(AbstractRouteDef): method: str path: str handler: 
_HandlerType
-    kwargs: Dict[str, Any]
+    kwargs: dict[str, Any]
 
     def __repr__(self) -> str:
         info = []
@@ -70,7 +59,7 @@ def __repr__(self) -> str:
             method=self.method, path=self.path, handler=self.handler, info="".join(info)
         )
 
-    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
+    def register(self, router: UrlDispatcher) -> list[AbstractRoute]:
         if self.method in hdrs.METH_ALL:
             reg = getattr(router, "add_" + self.method.lower())
             return [reg(self.path, self.handler, **self.kwargs)]
@@ -84,7 +73,7 @@ def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
 class StaticDef(AbstractRouteDef):
     prefix: str
     path: PathLike
-    kwargs: Dict[str, Any]
+    kwargs: dict[str, Any]
 
     def __repr__(self) -> str:
         info = []
@@ -94,7 +83,7 @@ def __repr__(self) -> str:
             prefix=self.prefix, path=self.path, info="".join(info)
         )
 
-    def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
+    def register(self, router: UrlDispatcher) -> list[AbstractRoute]:
         resource = router.add_static(self.prefix, self.path, **self.kwargs)
         routes = resource.get_info().get("routes", {})
         return list(routes.values())
@@ -116,7 +105,7 @@ def get(
     path: str,
     handler: _HandlerType,
     *,
-    name: Optional[str] = None,
+    name: str | None = None,
     allow_head: bool = True,
     **kwargs: Any,
 ) -> RouteDef:
@@ -141,7 +130,7 @@ def delete(path: str, handler: _HandlerType, **kwargs: Any) -> RouteDef:
     return route(hdrs.METH_DELETE, path, handler, **kwargs)
 
 
-def view(path: str, handler: Type[AbstractView], **kwargs: Any) -> RouteDef:
+def view(path: str, handler: type[AbstractView], **kwargs: Any) -> RouteDef:
     return route(hdrs.METH_ANY, path, handler, **kwargs)
 
 
@@ -156,7 +145,7 @@ class RouteTableDef(Sequence[AbstractRouteDef]):
     """Route definition table"""
 
     def __init__(self) -> None:
-        self._items: List[AbstractRouteDef] = []
+        self._items: list[AbstractRouteDef] = []
 
     def __repr__(self) -> str:
         return f"<RouteTableDef count={len(self._items)}>"
 
@@ -165,7 +154,7 @@ def __repr__(self) -> str:
     @overload
     def __getitem__(self, index: int) -> AbstractRouteDef: ...
 
     @overload
-    def __getitem__(self, index: slice) -> List[AbstractRouteDef]: ...
+    def __getitem__(self, index: slice) -> list[AbstractRouteDef]: ...
 
def __getitem__(self, index): # type: ignore[no-untyped-def] return self._items[index] diff --git a/aiohttp/web_runner.py b/aiohttp/web_runner.py index bcfec727c84..c66c9359e27 100644 --- a/aiohttp/web_runner.py +++ b/aiohttp/web_runner.py @@ -3,7 +3,7 @@ import socket import warnings from abc import ABC, abstractmethod -from typing import TYPE_CHECKING, Any, List, Optional, Set +from typing import TYPE_CHECKING, Any from yarl import URL @@ -48,7 +48,7 @@ def __init__( runner: "BaseRunner", *, shutdown_timeout: float = 60.0, - ssl_context: Optional[SSLContext] = None, + ssl_context: SSLContext | None = None, backlog: int = 128, ) -> None: if runner.server is None: @@ -60,7 +60,7 @@ def __init__( self._runner = runner self._ssl_context = ssl_context self._backlog = backlog - self._server: Optional[asyncio.AbstractServer] = None + self._server: asyncio.AbstractServer | None = None @property @abstractmethod @@ -85,14 +85,14 @@ class TCPSite(BaseSite): def __init__( self, runner: "BaseRunner", - host: Optional[str] = None, - port: Optional[int] = None, + host: str | None = None, + port: int | None = None, *, shutdown_timeout: float = 60.0, - ssl_context: Optional[SSLContext] = None, + ssl_context: SSLContext | None = None, backlog: int = 128, - reuse_address: Optional[bool] = None, - reuse_port: Optional[bool] = None, + reuse_address: bool | None = None, + reuse_port: bool | None = None, ) -> None: super().__init__( runner, @@ -138,7 +138,7 @@ def __init__( path: PathLike, *, shutdown_timeout: float = 60.0, - ssl_context: Optional[SSLContext] = None, + ssl_context: SSLContext | None = None, backlog: int = 128, ) -> None: super().__init__( @@ -207,7 +207,7 @@ def __init__( sock: socket.socket, *, shutdown_timeout: float = 60.0, - ssl_context: Optional[SSLContext] = None, + ssl_context: SSLContext | None = None, backlog: int = 128, ) -> None: super().__init__( @@ -251,17 +251,17 @@ def __init__( ) -> None: self._handle_signals = handle_signals self._kwargs = kwargs - self._server: Optional[Server] = None - self._sites: List[BaseSite] = [] + self._server: Server | None = None + self._sites: list[BaseSite] = [] self._shutdown_timeout = shutdown_timeout @property - def server(self) -> Optional[Server]: + def server(self) -> Server | None: return self._server @property - def addresses(self) -> List[Any]: - ret: List[Any] = [] + def addresses(self) -> list[Any]: + ret: list[Any] = [] for site in self._sites: server = site._server if server is not None: @@ -272,7 +272,7 @@ def addresses(self) -> List[Any]: return ret @property - def sites(self) -> Set[BaseSite]: + def sites(self) -> set[BaseSite]: return set(self._sites) async def setup(self) -> None: @@ -374,8 +374,7 @@ def __init__( super().__init__(handle_signals=handle_signals, **kwargs) if not isinstance(app, Application): raise TypeError( - "The first argument should be web.Application " - "instance, got {!r}".format(app) + "The first argument should be web.Application " f"instance, got {app!r}" ) self._app = app diff --git a/aiohttp/web_server.py b/aiohttp/web_server.py index 328aca1e405..778e6cf81c4 100644 --- a/aiohttp/web_server.py +++ b/aiohttp/web_server.py @@ -17,13 +17,13 @@ def __init__( self, handler: _RequestHandler, *, - request_factory: Optional[_RequestFactory] = None, + request_factory: _RequestFactory | None = None, handler_cancellation: bool = False, - loop: Optional[asyncio.AbstractEventLoop] = None, + loop: asyncio.AbstractEventLoop | None = None, **kwargs: Any, ) -> None: self._loop = loop or asyncio.get_running_loop() - 
self._connections: Dict[RequestHandler, asyncio.Transport] = {} + self._connections: dict[RequestHandler, asyncio.Transport] = {} self._kwargs = kwargs # requests_count is the number of requests being processed by the server # for the lifetime of the server. @@ -33,7 +33,7 @@ def __init__( self.handler_cancellation = handler_cancellation @property - def connections(self) -> List[RequestHandler]: + def connections(self) -> list[RequestHandler]: return list(self._connections.keys()) def connection_made( @@ -42,7 +42,7 @@ def connection_made( self._connections[handler] = transport def connection_lost( - self, handler: RequestHandler, exc: Optional[BaseException] = None + self, handler: RequestHandler, exc: BaseException | None = None ) -> None: if handler in self._connections: if handler._task_handler: @@ -66,7 +66,7 @@ def pre_shutdown(self) -> None: for conn in self._connections: conn.close() - async def shutdown(self, timeout: Optional[float] = None) -> None: + async def shutdown(self, timeout: float | None = None) -> None: coros = (conn.shutdown(timeout) for conn in self._connections) await asyncio.gather(*coros) self._connections.clear() diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 61766f0c5c6..40d29dda794 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -10,33 +10,21 @@ import re import sys import warnings -from functools import wraps -from pathlib import Path -from types import MappingProxyType -from typing import ( - TYPE_CHECKING, - Any, +from collections.abc import ( Awaitable, Callable, Container, - Dict, - Final, Generator, Iterable, Iterator, - List, Mapping, - NoReturn, - Optional, - Pattern, - Set, Sized, - Tuple, - Type, - TypedDict, - Union, - cast, ) +from functools import wraps +from pathlib import Path +from re import Pattern +from types import MappingProxyType +from typing import TYPE_CHECKING, Any, Final, NoReturn, Optional, TypedDict, cast from yarl import URL, __version__ as yarl_version @@ -74,17 +62,13 @@ if TYPE_CHECKING: from .web_app import Application - BaseDict = Dict[str, str] + BaseDict = dict[str, str] else: BaseDict = dict -CIRCULAR_SYMLINK_ERROR = ( - (OSError,) - if sys.version_info < (3, 10) and sys.platform.startswith("win32") - else (RuntimeError,) if sys.version_info < (3, 13) else () -) +CIRCULAR_SYMLINK_ERROR = (RuntimeError,) if sys.version_info < (3, 13) else () -YARL_VERSION: Final[Tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2])) +YARL_VERSION: Final[tuple[int, ...]] = tuple(map(int, yarl_version.split(".")[:2])) HTTP_METHOD_RE: Final[Pattern[str]] = re.compile( r"^[0-9A-Za-z!#\$%&'\*\+\-\.\^_`\|~]+$" @@ -95,8 +79,8 @@ PATH_SEP: Final[str] = re.escape("/") -_ExpectHandler = Callable[[Request], Awaitable[Optional[StreamResponse]]] -_Resolve = Tuple[Optional["UrlMappingMatchInfo"], Set[str]] +_ExpectHandler = Callable[[Request], Awaitable[StreamResponse | None]] +_Resolve = tuple[Optional["UrlMappingMatchInfo"], set[str]] html_escape = functools.partial(html.escape, quote=True) @@ -121,11 +105,11 @@ class _InfoDict(TypedDict, total=False): class AbstractResource(Sized, Iterable["AbstractRoute"]): - def __init__(self, *, name: Optional[str] = None) -> None: + def __init__(self, *, name: str | None = None) -> None: self._name = name @property - def name(self) -> Optional[str]: + def name(self) -> str | None: return self._name @property @@ -171,10 +155,10 @@ class AbstractRoute(abc.ABC): def __init__( self, method: str, - handler: Union[Handler, Type[AbstractView]], + 
handler: Handler | type[AbstractView],
         *,
-        expect_handler: Optional[_ExpectHandler] = None,
-        resource: Optional[AbstractResource] = None,
+        expect_handler: _ExpectHandler | None = None,
+        resource: AbstractResource | None = None,
     ) -> None:
 
         if expect_handler is None:
@@ -233,11 +217,11 @@ def handler(self) -> Handler:
 
     @property
     @abc.abstractmethod
-    def name(self) -> Optional[str]:
+    def name(self) -> str | None:
         """Optional route's name, always equals to resource's name."""
 
     @property
-    def resource(self) -> Optional[AbstractResource]:
+    def resource(self) -> AbstractResource | None:
         return self._resource
 
     @abc.abstractmethod
@@ -248,7 +232,7 @@ def get_info(self) -> _InfoDict:
     def url_for(self, *args: str, **kwargs: str) -> URL:
         """Construct url for route with additional params."""
 
-    async def handle_expect_header(self, request: Request) -> Optional[StreamResponse]:
+    async def handle_expect_header(self, request: Request) -> StreamResponse | None:
         return await self._expect_handler(request)
 
 
@@ -256,11 +240,11 @@ class UrlMappingMatchInfo(BaseDict, AbstractMatchInfo):
 
     __slots__ = ("_route", "_apps", "_current_app", "_frozen")
 
-    def __init__(self, match_dict: Dict[str, str], route: AbstractRoute) -> None:
+    def __init__(self, match_dict: dict[str, str], route: AbstractRoute) -> None:
         super().__init__(match_dict)
         self._route = route
-        self._apps: List[Application] = []
-        self._current_app: Optional[Application] = None
+        self._apps: list[Application] = []
+        self._current_app: Application | None = None
         self._frozen = False
 
     @property
@@ -276,14 +260,14 @@ def expect_handler(self) -> _ExpectHandler:
         return self._route.handle_expect_header
 
     @property
-    def http_exception(self) -> Optional[HTTPException]:
+    def http_exception(self) -> HTTPException | None:
         return None
 
     def get_info(self) -> _InfoDict:  # type: ignore[override]
         return self._route.get_info()
 
     @property
-    def apps(self) -> Tuple["Application", ...]:
+    def apps(self) -> tuple["Application", ...]:
         return tuple(self._apps)
 
     def add_app(self, app: "Application") -> None:
@@ -304,9 +288,7 @@ def current_app(self, app: "Application") -> None:
         if DEBUG:  # pragma: no cover
             if app not in self._apps:
                 raise RuntimeError(
-                    "Expected one of the following apps {!r}, got {!r}".format(
-                        self._apps, app
-                    )
+                    f"Expected one of the following apps {self._apps!r}, got {app!r}"
                 )
         self._current_app = app
 
@@ -330,9 +312,7 @@ def http_exception(self) -> HTTPException:
         return self._exception
 
     def __repr__(self) -> str:
-        return "<MatchInfoError {}: {}>".format(
-            self._exception.status, self._exception.reason
-        )
+        return f"<MatchInfoError {self._exception.status}: {self._exception.reason}>"
 
 
 async def _default_expect_handler(request: Request) -> None:
@@ -352,18 +332,18 @@ async def _default_expect_handler(request: Request) -> None:
 
 
 class Resource(AbstractResource):
-    def __init__(self, *, name: Optional[str] = None) -> None:
+    def __init__(self, *, name: str | None = None) -> None:
         super().__init__(name=name)
-        self._routes: Dict[str, ResourceRoute] = {}
-        self._any_route: Optional[ResourceRoute] = None
-        self._allowed_methods: Set[str] = set()
+        self._routes: dict[str, ResourceRoute] = {}
+        self._any_route: ResourceRoute | None = None
+        self._allowed_methods: set[str] = set()
 
     def add_route(
         self,
         method: str,
-        handler: Union[Type[AbstractView], Handler],
+        handler: type[AbstractView] | Handler,
         *,
-        expect_handler: Optional[_ExpectHandler] = None,
+        expect_handler: _ExpectHandler | None = None,
     ) -> "ResourceRoute":
         if route := self._routes.get(method, self._any_route):
             raise RuntimeError(
@@ -393,7 +373,7 @@ async def resolve(self, request: 
Request) -> _Resolve:
         return None, self._allowed_methods
 
     @abc.abstractmethod
-    def _match(self, path: str) -> Optional[Dict[str, str]]:
+    def _match(self, path: str) -> dict[str, str] | None:
         pass  # pragma: no cover
 
     def __len__(self) -> int:
@@ -406,7 +386,7 @@ def __iter__(self) -> Iterator["ResourceRoute"]:
 
 
 class PlainResource(Resource):
-    def __init__(self, path: str, *, name: Optional[str] = None) -> None:
+    def __init__(self, path: str, *, name: str | None = None) -> None:
         super().__init__(name=name)
         assert not path or path.startswith("/")
         self._path = path
@@ -425,7 +405,7 @@ def add_prefix(self, prefix: str) -> None:
         assert len(prefix) > 1
         self._path = prefix + self._path
 
-    def _match(self, path: str) -> Optional[Dict[str, str]]:
+    def _match(self, path: str) -> dict[str, str] | None:
         # string comparison is about 10 times faster than regexp matching
         if self._path == path:
             return {}
@@ -451,7 +431,7 @@ class DynamicResource(Resource):
 
     DYN_WITH_RE = re.compile(r"\{(?P<var>[_a-zA-Z][_a-zA-Z0-9]*):(?P<re>.+)\}")
     GOOD = r"[^{}/]+"
 
-    def __init__(self, path: str, *, name: Optional[str] = None) -> None:
+    def __init__(self, path: str, *, name: str | None = None) -> None:
         super().__init__(name=name)
         self._orig_path = path
         pattern = ""
@@ -496,7 +476,7 @@ def add_prefix(self, prefix: str) -> None:
         self._pattern = re.compile(re.escape(prefix) + self._pattern.pattern)
         self._formatter = prefix + self._formatter
 
-    def _match(self, path: str) -> Optional[Dict[str, str]]:
+    def _match(self, path: str) -> dict[str, str] | None:
         match = self._pattern.fullmatch(path)
         if match is None:
             return None
@@ -516,13 +496,11 @@ def url_for(self, **parts: str) -> URL:
 
     def __repr__(self) -> str:
         name = "'" + self.name + "' " if self.name is not None else ""
-        return "<DynamicResource {name} {formatter}>".format(
-            name=name, formatter=self._formatter
-        )
+        return f"<DynamicResource {name} {self._formatter}>"
 
 
 class PrefixResource(AbstractResource):
-    def __init__(self, prefix: str, *, name: Optional[str] = None) -> None:
+    def __init__(self, prefix: str, *, name: str | None = None) -> None:
         assert not prefix or prefix.startswith("/"), prefix
         assert prefix in ("", "/") or not prefix.endswith("/"), prefix
         super().__init__(name=name)
@@ -554,8 +532,8 @@ def __init__(
         prefix: str,
         directory: PathLike,
         *,
-        name: Optional[str] = None,
-        expect_handler: Optional[_ExpectHandler] = None,
+        name: str | None = None,
+        expect_handler: _ExpectHandler | None = None,
         chunk_size: int = 256 * 1024,
         show_index: bool = False,
         follow_symlinks: bool = False,
@@ -589,7 +567,7 @@ def url_for(  # type: ignore[override]
         self,
         *,
         filename: PathLike,
-        append_version: Optional[bool] = None,
+        append_version: bool | None = None,
     ) -> URL:
         if append_version is None:
             append_version = self._append_version
@@ -751,9 +729,7 @@ def _directory_as_html(self, dir_path: Path) -> str:
 
     def __repr__(self) -> str:
         name = "'" + self.name + "'" if self.name is not None else ""
-        return "<StaticResource {name} {path} -> {directory!r}>".format(
-            name=name, path=self._prefix, directory=self._directory
-        )
+        return f"<StaticResource {name} {self._prefix} -> {self._directory!r}>"
 
 
 class PrefixedSubAppResource(PrefixResource):
@@ -797,9 +773,7 @@ def __iter__(self) -> Iterator[AbstractRoute]:
         return iter(self._app.router.routes())
 
     def __repr__(self) -> str:
-        return "<PrefixedSubAppResource {prefix} -> {app!r}>".format(
-            prefix=self._prefix, app=self._app
-        )
+        return f"<PrefixedSubAppResource {self._prefix} -> {self._app!r}>"
 
 
 class AbstractRuleMatching(abc.ABC):
@@ -908,22 +882,20 @@ class ResourceRoute(AbstractRoute):
     def __init__(
         self,
         method: str,
-        handler: Union[Handler, Type[AbstractView]],
+        handler: Handler | type[AbstractView],
         resource: AbstractResource,
         *,
-        expect_handler: 
Optional[_ExpectHandler] = None,
+        expect_handler: _ExpectHandler | None = None,
     ) -> None:
         super().__init__(
             method, handler, expect_handler=expect_handler, resource=resource
         )
 
     def __repr__(self) -> str:
-        return "<ResourceRoute [{method}] {resource} -> {handler!r}".format(
-            method=self.method, resource=self._resource, handler=self.handler
-        )
+        return f"<ResourceRoute [{self.method}] {self._resource} -> {self.handler!r}"
 
     @property
-    def name(self) -> Optional[str]:
+    def name(self) -> str | None:
         if self._resource is None:
             return None
         return self._resource.name
@@ -947,7 +919,7 @@ def url_for(self, *args: str, **kwargs: str) -> URL:
         raise RuntimeError(".url_for() is not allowed for SystemRoute")
 
     @property
-    def name(self) -> Optional[str]:
+    def name(self) -> str | None:
         return None
 
     def get_info(self) -> _InfoDict:
@@ -965,14 +937,14 @@ def reason(self) -> str:
         return self._http_exception.reason
 
     def __repr__(self) -> str:
-        return "<SystemRoute {self.status}: {self.reason}>".format(self=self)
+        return f"<SystemRoute {self.status}: {self.reason}>"
 
 
 class View(AbstractView):
     async def _iter(self) -> StreamResponse:
         if self.request.method not in hdrs.METH_ALL:
             self._raise_allowed_methods()
-        method: Optional[Callable[[], Awaitable[StreamResponse]]]
+        method: Callable[[], Awaitable[StreamResponse]] | None
         method = getattr(self, self.request.method.lower(), None)
         if method is None:
             self._raise_allowed_methods()
@@ -989,7 +961,7 @@ def _raise_allowed_methods(self) -> NoReturn:
 
 
 class ResourcesView(Sized, Iterable[AbstractResource], Container[AbstractResource]):
-    def __init__(self, resources: List[AbstractResource]) -> None:
+    def __init__(self, resources: list[AbstractResource]) -> None:
         self._resources = resources
 
     def __len__(self) -> int:
@@ -1003,8 +975,8 @@ def __contains__(self, resource: object) -> bool:
 
 
 class RoutesView(Sized, Iterable[AbstractRoute], Container[AbstractRoute]):
-    def __init__(self, resources: List[AbstractResource]):
-        self._routes: List[AbstractRoute] = []
+    def __init__(self, resources: list[AbstractResource]):
+        self._routes: list[AbstractRoute] = []
         for resource in resources:
             for route in resource:
                 self._routes.append(route)
@@ -1025,14 +997,14 @@ class UrlDispatcher(AbstractRouter, Mapping[str, AbstractResource]):
 
     def __init__(self) -> None:
         super().__init__()
-        self._resources: List[AbstractResource] = []
-        self._named_resources: Dict[str, AbstractResource] = {}
+        self._resources: list[AbstractResource] = []
+        self._named_resources: dict[str, AbstractResource] = {}
         self._resource_index: dict[str, list[AbstractResource]] = {}
-        self._matched_sub_app_resources: List[MatchedSubAppResource] = []
+        self._matched_sub_app_resources: list[MatchedSubAppResource] = []
 
     async def resolve(self, request: Request) -> UrlMappingMatchInfo:
         resource_index = self._resource_index
-        allowed_methods: Set[str] = set()
+        allowed_methods: set[str] = set()
 
         # Walk the url parts looking for candidates. We walk the url backwards
         # to ensure the most explicit match is found first. 
If there are multiple @@ -1112,15 +1084,15 @@ def register_resource(self, resource: AbstractResource) -> None: ) if not part.isidentifier(): raise ValueError( - "Incorrect route name {!r}, " + f"Incorrect route name {name!r}, " "the name should be a sequence of " "python identifiers separated " - "by dash, dot or column".format(name) + "by dash, dot or column" ) if name in self._named_resources: raise ValueError( - "Duplicate {!r}, " - "already handled by {!r}".format(name, self._named_resources[name]) + f"Duplicate {name!r}, " + f"already handled by {self._named_resources[name]!r}" ) self._named_resources[name] = resource self._resources.append(resource) @@ -1155,7 +1127,7 @@ def unindex_resource(self, resource: AbstractResource) -> None: resource_key = self._get_resource_index_key(resource) self._resource_index[resource_key].remove(resource) - def add_resource(self, path: str, *, name: Optional[str] = None) -> Resource: + def add_resource(self, path: str, *, name: str | None = None) -> Resource: if path and not path.startswith("/"): raise ValueError("path should be started with / or be empty") # Reuse last added resource if path and name are the same @@ -1175,10 +1147,10 @@ def add_route( self, method: str, path: str, - handler: Union[Handler, Type[AbstractView]], + handler: Handler | type[AbstractView], *, - name: Optional[str] = None, - expect_handler: Optional[_ExpectHandler] = None, + name: str | None = None, + expect_handler: _ExpectHandler | None = None, ) -> AbstractRoute: resource = self.add_resource(path, name=name) return resource.add_route(method, handler, expect_handler=expect_handler) @@ -1188,8 +1160,8 @@ def add_static( prefix: str, path: PathLike, *, - name: Optional[str] = None, - expect_handler: Optional[_ExpectHandler] = None, + name: str | None = None, + expect_handler: _ExpectHandler | None = None, chunk_size: int = 256 * 1024, show_index: bool = False, follow_symlinks: bool = False, @@ -1230,7 +1202,7 @@ def add_get( path: str, handler: Handler, *, - name: Optional[str] = None, + name: str | None = None, allow_head: bool = True, **kwargs: Any, ) -> AbstractRoute: @@ -1261,7 +1233,7 @@ def add_delete(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRout return self.add_route(hdrs.METH_DELETE, path, handler, **kwargs) def add_view( - self, path: str, handler: Type[AbstractView], **kwargs: Any + self, path: str, handler: type[AbstractView], **kwargs: Any ) -> AbstractRoute: """Shortcut for add_route with ANY methods for a class-based view.""" return self.add_route(hdrs.METH_ANY, path, handler, **kwargs) @@ -1271,7 +1243,7 @@ def freeze(self) -> None: for resource in self._resources: resource.freeze() - def add_routes(self, routes: Iterable[AbstractRouteDef]) -> List[AbstractRoute]: + def add_routes(self, routes: Iterable[AbstractRouteDef]) -> list[AbstractRoute]: """Append routes to route table. Parameter should be a sequence of RouteDef objects. 
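
The web_urldispatcher.py hunks above repeat the three substitutions this patch
applies throughout the tree: typing.Optional[X] becomes X | None (PEP 604), the
typing.List/Dict/Set/Tuple aliases become the builtin generics
list/dict/set/tuple (PEP 585), and ABCs such as Callable, Iterator and
Awaitable are imported from collections.abc instead of typing. A minimal sketch
of the before/after style, assuming Python 3.10+ as required by the setup.cfg
hunk further down; the function below is illustrative and not part of aiohttp:

    from collections.abc import Iterator  # was: from typing import Iterator


    # Before: def iter_routes(names: Optional[List[str]]) -> Iterator[Dict[str, str]]:
    def iter_routes(names: list[str] | None) -> Iterator[dict[str, str]]:
        """Illustrative helper: yield one mapping per route name."""
        for name in names or []:
            yield {"name": name}

The | spelling is an ordinary expression evaluated when the annotation is
built, so it is only safe once python_requires guarantees 3.10+; on 3.9 the
same annotations would raise TypeError at import time unless deferred with
from __future__ import annotations.
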
diff --git a/aiohttp/web_ws.py b/aiohttp/web_ws.py index 575f9a3dc85..2e55038a130 100644 --- a/aiohttp/web_ws.py +++ b/aiohttp/web_ws.py @@ -4,7 +4,8 @@ import hashlib import json import sys -from typing import Any, Final, Iterable, Optional, Tuple, Union, cast +from collections.abc import Iterable +from typing import Any, Final, cast import attr from multidict import CIMultiDict @@ -53,7 +54,7 @@ @attr.s(auto_attribs=True, frozen=True, slots=True) class WebSocketReady: ok: bool - protocol: Optional[str] + protocol: str | None def __bool__(self) -> bool: return self.ok @@ -62,30 +63,30 @@ def __bool__(self) -> bool: class WebSocketResponse(StreamResponse): _length_check: bool = False - _ws_protocol: Optional[str] = None - _writer: Optional[WebSocketWriter] = None - _reader: Optional[WebSocketDataQueue] = None + _ws_protocol: str | None = None + _writer: WebSocketWriter | None = None + _reader: WebSocketDataQueue | None = None _closed: bool = False _closing: bool = False _conn_lost: int = 0 - _close_code: Optional[int] = None - _loop: Optional[asyncio.AbstractEventLoop] = None + _close_code: int | None = None + _loop: asyncio.AbstractEventLoop | None = None _waiting: bool = False - _close_wait: Optional[asyncio.Future[None]] = None - _exception: Optional[BaseException] = None + _close_wait: asyncio.Future[None] | None = None + _exception: BaseException | None = None _heartbeat_when: float = 0.0 - _heartbeat_cb: Optional[asyncio.TimerHandle] = None - _pong_response_cb: Optional[asyncio.TimerHandle] = None - _ping_task: Optional[asyncio.Task[None]] = None + _heartbeat_cb: asyncio.TimerHandle | None = None + _pong_response_cb: asyncio.TimerHandle | None = None + _ping_task: asyncio.Task[None] | None = None def __init__( self, *, timeout: float = 10.0, - receive_timeout: Optional[float] = None, + receive_timeout: float | None = None, autoclose: bool = True, autoping: bool = True, - heartbeat: Optional[float] = None, + heartbeat: float | None = None, protocols: Iterable[str] = (), compress: bool = True, max_msg_size: int = 4 * 1024 * 1024, @@ -100,7 +101,7 @@ def __init__( self._heartbeat = heartbeat if heartbeat is not None: self._pong_heartbeat = heartbeat / 2.0 - self._compress: Union[bool, int] = compress + self._compress: bool | int = compress self._max_msg_size = max_msg_size self._writer_limit = writer_limit @@ -220,25 +221,23 @@ async def prepare(self, request: BaseRequest) -> AbstractStreamWriter: def _handshake( self, request: BaseRequest - ) -> Tuple["CIMultiDict[str]", Optional[str], int, bool]: + ) -> tuple["CIMultiDict[str]", str | None, int, bool]: headers = request.headers if "websocket" != headers.get(hdrs.UPGRADE, "").lower().strip(): raise HTTPBadRequest( text=( - "No WebSocket UPGRADE hdr: {}\n Can " + f"No WebSocket UPGRADE hdr: {headers.get(hdrs.UPGRADE)}\n Can " '"Upgrade" only to "WebSocket".' 
- ).format(headers.get(hdrs.UPGRADE)) + ) ) if "upgrade" not in headers.get(hdrs.CONNECTION, "").lower(): raise HTTPBadRequest( - text="No CONNECTION upgrade hdr: {}".format( - headers.get(hdrs.CONNECTION) - ) + text=f"No CONNECTION upgrade hdr: {headers.get(hdrs.CONNECTION)}" ) # find common sub-protocol between client and server - protocol: Optional[str] = None + protocol: str | None = None if hdrs.SEC_WEBSOCKET_PROTOCOL in headers: req_protocols = [ str(proto.strip()) @@ -304,7 +303,7 @@ def _handshake( notakeover, ) - def _pre_start(self, request: BaseRequest) -> Tuple[Optional[str], WebSocketWriter]: + def _pre_start(self, request: BaseRequest) -> tuple[str | None, WebSocketWriter]: self._loop = request._loop headers, protocol, compress, notakeover = self._handshake(request) @@ -326,7 +325,7 @@ def _pre_start(self, request: BaseRequest) -> Tuple[Optional[str], WebSocketWrit return protocol, writer def _post_start( - self, request: BaseRequest, protocol: Optional[str], writer: WebSocketWriter + self, request: BaseRequest, protocol: str | None, writer: WebSocketWriter ) -> None: self._ws_protocol = protocol self._writer = writer @@ -363,15 +362,15 @@ def closed(self) -> bool: return self._closed @property - def close_code(self) -> Optional[int]: + def close_code(self) -> int | None: return self._close_code @property - def ws_protocol(self) -> Optional[str]: + def ws_protocol(self) -> str | None: return self._ws_protocol @property - def compress(self) -> Union[int, bool]: + def compress(self) -> int | bool: return self._compress def get_extra_info(self, name: str, default: Any = None) -> Any: @@ -387,7 +386,7 @@ def get_extra_info(self, name: str, default: Any = None) -> Any: return default return transport.get_extra_info(name, default) - def exception(self) -> Optional[BaseException]: + def exception(self) -> BaseException | None: return self._exception async def ping(self, message: bytes = b"") -> None: @@ -402,14 +401,14 @@ async def pong(self, message: bytes = b"") -> None: await self._writer.send_frame(message, WSMsgType.PONG) async def send_frame( - self, message: bytes, opcode: WSMsgType, compress: Optional[int] = None + self, message: bytes, opcode: WSMsgType, compress: int | None = None ) -> None: """Send a frame over the websocket.""" if self._writer is None: raise RuntimeError("Call .prepare() first") await self._writer.send_frame(message, opcode, compress) - async def send_str(self, data: str, compress: Optional[int] = None) -> None: + async def send_str(self, data: str, compress: int | None = None) -> None: if self._writer is None: raise RuntimeError("Call .prepare() first") if not isinstance(data, str): @@ -418,7 +417,7 @@ async def send_str(self, data: str, compress: Optional[int] = None) -> None: data.encode("utf-8"), WSMsgType.TEXT, compress=compress ) - async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None: + async def send_bytes(self, data: bytes, compress: int | None = None) -> None: if self._writer is None: raise RuntimeError("Call .prepare() first") if not isinstance(data, (bytes, bytearray, memoryview)): @@ -428,7 +427,7 @@ async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None: async def send_json( self, data: Any, - compress: Optional[int] = None, + compress: int | None = None, *, dumps: JSONEncoder = json.dumps, ) -> None: @@ -514,7 +513,7 @@ def _close_transport(self) -> None: if self._req is not None and self._req.transport is not None: self._req.transport.close() - async def receive(self, timeout: 
Optional[float] = None) -> WSMessage: + async def receive(self, timeout: float | None = None) -> WSMessage: if self._reader is None: raise RuntimeError("Call .prepare() first") @@ -588,7 +587,7 @@ async def receive(self, timeout: Optional[float] = None) -> WSMessage: return msg - async def receive_str(self, *, timeout: Optional[float] = None) -> str: + async def receive_str(self, *, timeout: float | None = None) -> str: msg = await self.receive(timeout) if msg.type is not WSMsgType.TEXT: raise WSMessageTypeError( @@ -596,7 +595,7 @@ async def receive_str(self, *, timeout: Optional[float] = None) -> str: ) return cast(str, msg.data) - async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: + async def receive_bytes(self, *, timeout: float | None = None) -> bytes: msg = await self.receive(timeout) if msg.type is not WSMsgType.BINARY: raise WSMessageTypeError( @@ -605,7 +604,7 @@ async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes: return cast(bytes, msg.data) async def receive_json( - self, *, loads: JSONDecoder = json.loads, timeout: Optional[float] = None + self, *, loads: JSONDecoder = json.loads, timeout: float | None = None ) -> Any: data = await self.receive_str(timeout=timeout) return loads(data) diff --git a/aiohttp/worker.py b/aiohttp/worker.py index f7281bfde75..2c0e59530a2 100644 --- a/aiohttp/worker.py +++ b/aiohttp/worker.py @@ -43,9 +43,9 @@ class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported] def __init__(self, *args: Any, **kw: Any) -> None: # pragma: no cover super().__init__(*args, **kw) - self._task: Optional[asyncio.Task[None]] = None + self._task: asyncio.Task[None] | None = None self.exit_code = 0 - self._notify_waiter: Optional[asyncio.Future[bool]] = None + self._notify_waiter: asyncio.Future[bool] | None = None def init_process(self) -> None: # create new event_loop after fork @@ -84,7 +84,7 @@ async def _run(self) -> None: else: raise RuntimeError( "wsgi app should be either Application or " - "async function returning Application, got {}".format(self.wsgi) + f"async function returning Application, got {self.wsgi}" ) if runner is None: @@ -191,7 +191,7 @@ def init_signals(self) -> None: # Reset signals so Gunicorn doesn't swallow subprocess return codes # See: https://github.com/aio-libs/aiohttp/issues/6130 - def handle_quit(self, sig: int, frame: Optional[FrameType]) -> None: + def handle_quit(self, sig: int, frame: FrameType | None) -> None: self.alive = False # worker_int callback @@ -200,7 +200,7 @@ def handle_quit(self, sig: int, frame: Optional[FrameType]) -> None: # wakeup closing process self._notify_waiter_done() - def handle_abort(self, sig: int, frame: Optional[FrameType]) -> None: + def handle_abort(self, sig: int, frame: FrameType | None) -> None: self.alive = False self.exit_code = 1 self.cfg.worker_abort(self) diff --git a/docs/conf.py b/docs/conf.py index a449f223e1d..184ad7816cf 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -12,7 +12,6 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import io import os import re from pathlib import Path diff --git a/docs/contributing.rst b/docs/contributing.rst index 9abd367a150..d0741ee65ea 100644 --- a/docs/contributing.rst +++ b/docs/contributing.rst @@ -145,19 +145,6 @@ Please take a look on the produced output. Any extra texts (print statements and so on) should be removed. -.. 
note::
-
-   If you see that CI build is failing on a specific Python version and
-   you don't have this version on your computer, you can use the helper to
-   run it (only if you have docker)::
-
-       make test-<python-version>[-no-extensions]
-
-   For example, if you want to run tests for python3.10
-   without extensions, you can run this command::
-
-       make test-3.10-no-extensions
-
 Code coverage
 -------------
 
diff --git a/examples/background_tasks.py b/examples/background_tasks.py
index 4c026b81d01..121111d6857 100755
--- a/examples/background_tasks.py
+++ b/examples/background_tasks.py
@@ -1,15 +1,15 @@
 #!/usr/bin/env python3
 """Example of aiohttp.web.Application.on_startup signal handler"""
 import asyncio
+from collections.abc import AsyncIterator
 from contextlib import suppress
-from typing import AsyncIterator, List
 
 import valkey.asyncio as valkey
 
 from aiohttp import web
 
 valkey_listener = web.AppKey("valkey_listener", asyncio.Task[None])
-websockets = web.AppKey("websockets", List[web.WebSocketResponse])
+websockets = web.AppKey("websockets", list[web.WebSocketResponse])
 
 
 async def websocket_handler(request):
@@ -57,7 +57,7 @@ async def background_tasks(app: web.Application) -> AsyncIterator[None]:
 
 def init():
     app = web.Application()
-    l: List[web.WebSocketResponse] = []
+    l: list[web.WebSocketResponse] = []
     app[websockets] = l
     app.router.add_get("/news", websocket_handler)
     app.cleanup_ctx.append(background_tasks)
diff --git a/examples/combined_middleware.py b/examples/combined_middleware.py
index 8646a182b98..a4e50b07414 100644
--- a/examples/combined_middleware.py
+++ b/examples/combined_middleware.py
@@ -18,7 +18,7 @@
 import logging
 import time
 from http import HTTPStatus
-from typing import TYPE_CHECKING, Set, Union
+from typing import TYPE_CHECKING
 
 from aiohttp import (
     ClientHandlerType,
@@ -92,7 +92,7 @@ async def __call__(
         return await handler(request)
 
 
-DEFAULT_RETRY_STATUSES: Set[HTTPStatus] = {
+DEFAULT_RETRY_STATUSES: set[HTTPStatus] = {
     HTTPStatus.TOO_MANY_REQUESTS,
     HTTPStatus.INTERNAL_SERVER_ERROR,
     HTTPStatus.BAD_GATEWAY,
@@ -107,7 +107,7 @@ class RetryMiddleware:
     def __init__(
         self,
         max_retries: int = 3,
-        retry_statuses: Union[Set[HTTPStatus], None] = None,
+        retry_statuses: set[HTTPStatus] | None = None,
         initial_delay: float = 1.0,
         backoff_factor: float = 2.0,
     ) -> None:
@@ -122,7 +122,7 @@ async def __call__(
         handler: ClientHandlerType,
     ) -> ClientResponse:
         """Execute request with retry logic."""
-        last_response: Union[ClientResponse, None] = None
+        last_response: ClientResponse | None = None
         delay = self.initial_delay
 
         for attempt in range(self.max_retries + 1):
diff --git a/examples/fake_server.py b/examples/fake_server.py
index 2cfe3ed710e..bdfa671036c 100755
--- a/examples/fake_server.py
+++ b/examples/fake_server.py
@@ -3,7 +3,6 @@
 import pathlib
 import socket
 import ssl
-from typing import List
 
 import aiohttp
 from aiohttp import web
@@ -25,7 +24,7 @@ async def resolve(
         host: str,
         port: int = 0,
         family: socket.AddressFamily = socket.AF_INET,
-    ) -> List[ResolveResult]:
+    ) -> list[ResolveResult]:
         fake_port = self._fakes.get(host)
         if fake_port is not None:
             return [
diff --git a/examples/logging_middleware.py b/examples/logging_middleware.py
index b6345953db2..37027ea0acb 100644
--- a/examples/logging_middleware.py
+++ b/examples/logging_middleware.py
@@ -13,7 +13,8 @@
 import json
 import logging
 import time
-from typing import Any, Coroutine, List
+from collections.abc import Coroutine
+from typing import Any
 
 from aiohttp import ClientHandlerType, ClientRequest, ClientResponse, 
ClientSession, web @@ -141,7 +142,7 @@ async def run_tests() -> None: # Test 6: Multiple concurrent requests print("\n=== Test 6: Multiple concurrent requests ===") - coros: List[Coroutine[Any, Any, ClientResponse]] = [] + coros: list[Coroutine[Any, Any, ClientResponse]] = [] for i in range(3): coro = session.get(f"http://localhost:8080/hello/User{i}") coros.append(coro) diff --git a/examples/retry_middleware.py b/examples/retry_middleware.py index c8fa829455a..157bd5a268c 100644 --- a/examples/retry_middleware.py +++ b/examples/retry_middleware.py @@ -13,14 +13,14 @@ import asyncio import logging from http import HTTPStatus -from typing import TYPE_CHECKING, Dict, List, Set, Union +from typing import TYPE_CHECKING from aiohttp import ClientHandlerType, ClientRequest, ClientResponse, ClientSession, web logging.basicConfig(level=logging.INFO) _LOGGER = logging.getLogger(__name__) -DEFAULT_RETRY_STATUSES: Set[HTTPStatus] = { +DEFAULT_RETRY_STATUSES: set[HTTPStatus] = { HTTPStatus.TOO_MANY_REQUESTS, HTTPStatus.INTERNAL_SERVER_ERROR, HTTPStatus.BAD_GATEWAY, @@ -35,7 +35,7 @@ class RetryMiddleware: def __init__( self, max_retries: int = 3, - retry_statuses: Union[Set[HTTPStatus], None] = None, + retry_statuses: set[HTTPStatus] | None = None, initial_delay: float = 1.0, backoff_factor: float = 2.0, ) -> None: @@ -50,7 +50,7 @@ async def __call__( handler: ClientHandlerType, ) -> ClientResponse: """Execute request with retry logic.""" - last_response: Union[ClientResponse, None] = None + last_response: ClientResponse | None = None delay = self.initial_delay for attempt in range(self.max_retries + 1): @@ -92,8 +92,8 @@ class TestServer: """Test server with stateful endpoints for retry testing.""" def __init__(self) -> None: - self.request_counters: Dict[str, int] = {} - self.status_sequences: Dict[str, List[int]] = { + self.request_counters: dict[str, int] = {} + self.status_sequences: dict[str, list[int]] = { "eventually-ok": [500, 503, 502, 200], # Fails 3 times, then succeeds "always-error": [500, 500, 500, 500], # Always fails "immediate-ok": [200], # Succeeds immediately diff --git a/examples/token_refresh_middleware.py b/examples/token_refresh_middleware.py index 8a7ff963850..4f0a894c76c 100644 --- a/examples/token_refresh_middleware.py +++ b/examples/token_refresh_middleware.py @@ -20,8 +20,9 @@ import logging import secrets import time +from collections.abc import Coroutine from http import HTTPStatus -from typing import TYPE_CHECKING, Any, Coroutine, Dict, List, Union +from typing import TYPE_CHECKING, Any from aiohttp import ( ClientHandlerType, @@ -42,8 +43,8 @@ class TokenRefreshMiddleware: def __init__(self, token_endpoint: str, refresh_token: str) -> None: self.token_endpoint = token_endpoint self.refresh_token = refresh_token - self.access_token: Union[str, None] = None - self.token_expires_at: Union[float, None] = None + self.access_token: str | None = None + self.token_expires_at: float | None = None self._refresh_lock = asyncio.Lock() async def _refresh_access_token(self, session: ClientSession) -> str: @@ -121,8 +122,8 @@ class TestServer: """Test server with JWT-like token authentication.""" def __init__(self) -> None: - self.tokens_db: Dict[str, Dict[str, Union[str, float]]] = {} - self.refresh_tokens_db: Dict[str, Dict[str, Union[str, float]]] = { + self.tokens_db: dict[str, dict[str, str | float]] = {} + self.refresh_tokens_db: dict[str, dict[str, str | float]] = { # Hash of refresh token -> user data hashlib.sha256(b"demo_refresh_token_12345").hexdigest(): { "user_id": 
"user123", @@ -135,7 +136,7 @@ def generate_access_token(self) -> str: """Generate a secure random access token.""" return secrets.token_urlsafe(32) - async def _process_token_refresh(self, data: Dict[str, str]) -> web.Response: + async def _process_token_refresh(self, data: dict[str, str]) -> web.Response: """Process the token refresh request.""" refresh_token = data.get("refresh_token") @@ -189,7 +190,7 @@ async def handle_token_refresh(self, request: web.Request) -> web.Response: async def verify_bearer_token( self, request: web.Request - ) -> Union[Dict[str, Union[str, float]], None]: + ) -> dict[str, str | float] | None: """Verify bearer token and return user data if valid.""" auth_header = request.headers.get(hdrs.AUTHORIZATION, "") @@ -285,7 +286,7 @@ async def run_tests() -> None: print("\n=== Test 3: Multiple concurrent requests ===") print("(Should only refresh token once)") - coros: List[Coroutine[Any, Any, ClientResponse]] = [] + coros: list[Coroutine[Any, Any, ClientResponse]] = [] for i in range(3): coro = session.get("http://localhost:8080/api/protected") coros.append(coro) diff --git a/examples/web_ws.py b/examples/web_ws.py index 230651b45d0..3190b7b23a0 100755 --- a/examples/web_ws.py +++ b/examples/web_ws.py @@ -6,12 +6,11 @@ # mypy: disallow-any-expr, disallow-any-unimported, disallow-subclassing-any import os -from typing import List from aiohttp import web WS_FILE = os.path.join(os.path.dirname(__file__), "websocket.html") -sockets = web.AppKey("sockets", List[web.WebSocketResponse]) +sockets = web.AppKey("sockets", list[web.WebSocketResponse]) async def wshandler(request: web.Request) -> web.StreamResponse: @@ -54,7 +53,7 @@ async def on_shutdown(app: web.Application) -> None: def init() -> web.Application: app = web.Application() - l: List[web.WebSocketResponse] = [] + l: list[web.WebSocketResponse] = [] app[sockets] = l app.router.add_get("/", wshandler) app.on_shutdown.append(on_shutdown) diff --git a/pyproject.toml b/pyproject.toml index df8b8465348..81a3009e445 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -92,4 +92,4 @@ exclude-modules = "(^aiohttp\\.helpers)" [tool.black] # TODO: Remove when project metadata is moved here. # Black can read the value from [project.requires-python]. -target-version = ["py39", "py310", "py311", "py312"] +target-version = ["py310", "py311", "py312"] diff --git a/setup.cfg b/setup.cfg index 5117eccc043..583bc8f36f2 100644 --- a/setup.cfg +++ b/setup.cfg @@ -33,7 +33,6 @@ classifiers = Programming Language :: Python Programming Language :: Python :: 3 - Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 Programming Language :: Python :: 3.12 @@ -43,7 +42,7 @@ classifiers = Topic :: Internet :: WWW/HTTP [options] -python_requires = >=3.9 +python_requires = >=3.10 packages = aiohttp aiohttp._websocket @@ -153,9 +152,6 @@ filterwarnings = error ignore:module 'ssl' has no attribute 'OP_NO_COMPRESSION'. The Python interpreter is compiled against OpenSSL < 1.0.0. Ref. 
https.//docs.python.org/3/library/ssl.html#ssl.OP_NO_COMPRESSION:UserWarning ignore:Unclosed client session web.WebSocketResponse: @@ -46,7 +45,7 @@ async def on_shutdown(app: web.Application) -> None: ) app = web.Application() - l: List[web.WebSocketResponse] = [] + l: list[web.WebSocketResponse] = [] app[websockets] = l app.router.add_route("GET", "/", wshandler) app.on_shutdown.append(on_shutdown) diff --git a/tests/autobahn/test_autobahn.py b/tests/autobahn/test_autobahn.py index 0b70aacd471..d6498242143 100644 --- a/tests/autobahn/test_autobahn.py +++ b/tests/autobahn/test_autobahn.py @@ -1,8 +1,9 @@ import json import subprocess import sys +from collections.abc import Generator from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, Generator, List +from typing import TYPE_CHECKING, Any import pytest from pytest import TempPathFactory @@ -36,7 +37,7 @@ def build_autobahn_testsuite() -> Generator[None, None, None]: python_on_whales.docker.image.remove(x="autobahn-testsuite") -def get_failed_tests(report_path: str, name: str) -> List[Dict[str, Any]]: +def get_failed_tests(report_path: str, name: str) -> list[dict[str, Any]]: path = Path(report_path) result_summary = json.loads((path / "index.json").read_text())[name] failed_messages = [] diff --git a/tests/conftest.py b/tests/conftest.py index 2be64078659..9ae9c19df11 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,10 +4,11 @@ import socket import ssl import sys +from collections.abc import AsyncIterator, Generator, Iterator from hashlib import md5, sha1, sha256 from pathlib import Path from tempfile import TemporaryDirectory -from typing import Any, AsyncIterator, Generator, Iterator +from typing import Any from unittest import mock from uuid import uuid4 diff --git a/tests/test_benchmarks_client_request.py b/tests/test_benchmarks_client_request.py index 34ae3629f9e..3a56e21d448 100644 --- a/tests/test_benchmarks_client_request.py +++ b/tests/test_benchmarks_client_request.py @@ -2,7 +2,6 @@ import asyncio from http.cookies import BaseCookie -from typing import Union from multidict import CIMultiDict from pytest_codspeed import BenchmarkFixture @@ -132,7 +131,7 @@ def is_closing(self) -> bool: """Swallow is_closing.""" return False - def write(self, data: Union[bytes, bytearray, memoryview]) -> None: + def write(self, data: bytes | bytearray | memoryview) -> None: """Swallow writes.""" class MockProtocol(asyncio.BaseProtocol): diff --git a/tests/test_benchmarks_http_websocket.py b/tests/test_benchmarks_http_websocket.py index 8e6a8bb7bb9..d2761b3687e 100644 --- a/tests/test_benchmarks_http_websocket.py +++ b/tests/test_benchmarks_http_websocket.py @@ -1,7 +1,6 @@ """codspeed benchmarks for http websocket.""" import asyncio -from typing import Union import pytest from pytest_codspeed import BenchmarkFixture @@ -60,7 +59,7 @@ def is_closing(self) -> bool: """Swallow is_closing.""" return False - def write(self, data: Union[bytes, bytearray, memoryview]) -> None: + def write(self, data: bytes | bytearray | memoryview) -> None: """Swallow writes.""" diff --git a/tests/test_benchmarks_web_urldispatcher.py b/tests/test_benchmarks_web_urldispatcher.py index 936ed6320ed..339eaef8a0e 100644 --- a/tests/test_benchmarks_web_urldispatcher.py +++ b/tests/test_benchmarks_web_urldispatcher.py @@ -6,7 +6,7 @@ import random import string from pathlib import Path -from typing import NoReturn, Optional, cast +from typing import NoReturn, cast from unittest import mock import pytest @@ -68,7 +68,7 @@ async def handler(request: 
web.Request) -> NoReturn: router = app.router request = _mock_request(method="GET", path="/") - async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + async def run_url_dispatcher_benchmark() -> web.UrlMappingMatchInfo | None: ret = None for _ in range(resolve_count): ret = await router.resolve(request) @@ -106,7 +106,7 @@ async def handler(request: web.Request) -> NoReturn: router = app.router request = _mock_request(method="GET", path="/") - async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + async def run_url_dispatcher_benchmark() -> web.UrlMappingMatchInfo | None: ret = None for _ in range(resolve_count): ret = await router.resolve(request) @@ -136,7 +136,7 @@ def test_resolve_static_root_route( router = app.router request = _mock_request(method="GET", path="/") - async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + async def run_url_dispatcher_benchmark() -> web.UrlMappingMatchInfo | None: ret = None for _ in range(resolve_count): ret = await router.resolve(request) @@ -169,7 +169,7 @@ async def handler(request: web.Request) -> NoReturn: router = app.router request = _mock_request(method="GET", path="/api/server/dispatch/1/update") - async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + async def run_url_dispatcher_benchmark() -> web.UrlMappingMatchInfo | None: ret = None for _ in range(resolve_count): ret = await router.resolve(request) @@ -205,7 +205,7 @@ async def handler(request: web.Request) -> NoReturn: for count in range(250) ] - async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + async def run_url_dispatcher_benchmark() -> web.UrlMappingMatchInfo | None: ret = None for request in requests: ret = await router.resolve(request) @@ -245,7 +245,7 @@ async def handler(request: web.Request) -> NoReturn: requests = [(_mock_request(method="GET", path=url), url) for url in urls] - async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + async def run_url_dispatcher_benchmark() -> web.UrlMappingMatchInfo | None: ret = None for request, path in requests: ret = await router.resolve(request) @@ -282,7 +282,7 @@ async def handler(request: web.Request) -> NoReturn: for customer in range(250) ] - async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + async def run_url_dispatcher_benchmark() -> web.UrlMappingMatchInfo | None: ret = None for request in requests: ret = await router.resolve(request) @@ -323,7 +323,7 @@ async def handler(request: web.Request) -> NoReturn: for customer in range(250) ] - async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + async def run_url_dispatcher_benchmark() -> web.UrlMappingMatchInfo | None: ret = None for request in requests: ret = await router.resolve(request) @@ -362,7 +362,7 @@ async def handler(request: web.Request) -> NoReturn: for customer in range(250) ] - async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + async def run_url_dispatcher_benchmark() -> web.UrlMappingMatchInfo | None: ret = None for request in requests: ret = await router.resolve(request) @@ -411,7 +411,7 @@ async def handler(request: web.Request) -> NoReturn: ) ) - async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + async def run_url_dispatcher_benchmark() -> web.UrlMappingMatchInfo | None: ret = None for request in requests: ret = await router.resolve(request) @@ -482,7 +482,7 @@ async def handler(request: 
web.Request) -> NoReturn: ) ) - async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + async def run_url_dispatcher_benchmark() -> web.UrlMappingMatchInfo | None: ret = None for request in requests: ret = await router.resolve(request) @@ -518,7 +518,7 @@ async def handler(request: web.Request) -> NoReturn: request = _mock_request(method="GET", path="/") - async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + async def run_url_dispatcher_benchmark() -> web.UrlMappingMatchInfo | None: ret = None for i in range(250): ret = await router.resolve(request) @@ -558,7 +558,7 @@ async def handler(request: web.Request) -> NoReturn: for customer in range(250) ] - async def run_url_dispatcher_benchmark() -> Optional[web.UrlMappingMatchInfo]: + async def run_url_dispatcher_benchmark() -> web.UrlMappingMatchInfo | None: ret = None for request in requests: ret = await router.resolve(request) diff --git a/tests/test_circular_imports.py b/tests/test_circular_imports.py index d513e9bde8b..9b5d7ed2697 100644 --- a/tests/test_circular_imports.py +++ b/tests/test_circular_imports.py @@ -14,10 +14,11 @@ import socket import subprocess import sys +from collections.abc import Generator from itertools import chain from pathlib import Path from types import ModuleType -from typing import TYPE_CHECKING, Generator, List, Union +from typing import TYPE_CHECKING, Union import pytest @@ -28,8 +29,8 @@ def _mark_aiohttp_worker_for_skipping( - importables: List[str], -) -> List[Union[str, "ParameterSet"]]: + importables: list[str], +) -> list[Union[str, "ParameterSet"]]: return [ ( pytest.param( @@ -45,7 +46,7 @@ def _mark_aiohttp_worker_for_skipping( ] -def _find_all_importables(pkg: ModuleType) -> List[str]: +def _find_all_importables(pkg: ModuleType) -> list[str]: """Find all importables in the project. Return them in order. 
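
One detail worth noting in the tests/test_circular_imports.py hunk above:
Union survives while List does not. PEP 604's | operator is an ordinary
runtime expression, and it raises TypeError when one operand is a quoted
forward reference (here "ParameterSet", imported only under TYPE_CHECKING),
whereas typing.Union accepts strings and wraps them in ForwardRef. A minimal
sketch of the failure mode, assuming Python 3.10+; Holder is an illustrative
stand-in for a TYPE_CHECKING-only import:

    from typing import TYPE_CHECKING, Union

    if TYPE_CHECKING:

        class Holder:  # visible to type checkers only, undefined at runtime
            pass


    ok = list[Union[str, "Holder"]]  # fine: the string becomes a ForwardRef

    try:
        broken = str | "Holder"  # the | operator is evaluated eagerly
    except TypeError as exc:
        print(f"PEP 604 with a quoted name fails: {exc}")

A module could also defer evaluation with from __future__ import annotations
and keep the | spelling, but that only helps inside annotations; since function
annotations are built at definition time here, keeping Union is presumably the
simplest portable choice.
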
diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 6ebfd01c6ca..fd63f1f59c3 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -13,17 +13,9 @@ import tarfile import time import zipfile +from collections.abc import AsyncIterator, Awaitable, Callable from contextlib import suppress -from typing import ( - Any, - AsyncIterator, - Awaitable, - Callable, - List, - NoReturn, - Optional, - Type, -) +from typing import Any, NoReturn from unittest import mock import pytest @@ -1714,7 +1706,7 @@ async def write_bytes( self: ClientRequest, writer: StreamWriter, conn: Connection, - content_length: Optional[int] = None, + content_length: int | None = None, ) -> None: nonlocal write_mock, writelines_mock original_write = writer._write @@ -3239,7 +3231,7 @@ async def resolve( host: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET, - ) -> List[ResolveResult]: + ) -> list[ResolveResult]: server = etc_hosts[(host, port)] assert server.port is not None @@ -4367,7 +4359,7 @@ async def not_ok_handler(request): [(42, TypeError), ("InvalidUrl", InvalidURL)], ) async def test_request_with_wrong_proxy( - aiohttp_client: AiohttpClient, value: Any, exc_type: Type[Exception] + aiohttp_client: AiohttpClient, value: Any, exc_type: type[Exception] ) -> None: app = web.Application() session = await aiohttp_client(app) @@ -5156,7 +5148,7 @@ def __init__(self, port: int): async def resolve( self, host: str, port: int = 0, family: int = 0 - ) -> List[ResolveResult]: + ) -> list[ResolveResult]: if host in ("amazon.it", "www.amazon.it"): return [ { diff --git a/tests/test_client_middleware.py b/tests/test_client_middleware.py index 217877759c0..da5bcece6e8 100644 --- a/tests/test_client_middleware.py +++ b/tests/test_client_middleware.py @@ -2,7 +2,7 @@ import json import socket -from typing import Dict, List, NoReturn, Optional, Union +from typing import NoReturn import pytest @@ -243,7 +243,7 @@ async def handler(request: web.Request) -> web.Response: async def challenge_auth_middleware( request: ClientRequest, handler: ClientHandlerType ) -> ClientResponse: - nonce: Optional[str] = None + nonce: str | None = None attempted: bool = False while True: @@ -285,7 +285,7 @@ async def challenge_auth_middleware( async def test_client_middleware_multi_step_auth(aiohttp_server: AiohttpServer) -> None: """Test middleware with multi-step authentication flow.""" auth_state: dict[str, int] = {} - middleware_state: Dict[str, Optional[Union[int, str]]] = { + middleware_state: dict[str, int | str | None] = { "step": 0, "session": None, "challenge": None, @@ -372,7 +372,7 @@ async def test_client_middleware_conditional_retry( ) -> None: """Test middleware with conditional retry based on response content.""" request_count = 0 - token_state: Dict[str, Union[str, bool]] = { + token_state: dict[str, str | bool] = { "token": "old-token", "refreshed": False, } @@ -735,7 +735,7 @@ async def test_client_middleware_blocks_connection_before_established( ) -> None: """Test that middleware can block connections before they are established.""" blocked_hosts = {"blocked.example.com", "evil.com"} - connection_attempts: List[str] = [] + connection_attempts: list[str] = [] async def handler(request: web.Request) -> web.Response: return web.Response(text="Reached") @@ -801,7 +801,7 @@ async def test_client_middleware_blocks_connection_without_dns_lookup( ) -> None: """Test that middleware prevents DNS lookups for blocked hosts.""" blocked_hosts = {"blocked.domain.tld"} - 
dns_lookups_made: List[str] = [] + dns_lookups_made: list[str] = [] # Create a simple server for the allowed request async def handler(request: web.Request) -> web.Response: @@ -817,7 +817,7 @@ async def resolve( hostname: str, port: int = 0, family: socket.AddressFamily = socket.AF_INET, - ) -> List[ResolveResult]: + ) -> list[ResolveResult]: dns_lookups_made.append(hostname) return await super().resolve(hostname, port, family) @@ -878,7 +878,7 @@ class TrackingConnector(TCPConnector): connection_attempts = 0 async def _create_connection( - self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout" + self, req: ClientRequest, traces: list["Trace"], timeout: "ClientTimeout" ) -> ResponseHandler: self.connection_attempts += 1 return await super()._create_connection(req, traces, timeout) @@ -927,11 +927,11 @@ async def test_middleware_uses_session_avoids_recursion_with_path_check( aiohttp_server: AiohttpServer, ) -> None: """Test that middleware can avoid infinite recursion using a path check.""" - log_collector: List[Dict[str, str]] = [] + log_collector: list[dict[str, str]] = [] async def log_api_handler(request: web.Request) -> web.Response: """Handle log API requests.""" - data: Dict[str, str] = await request.json() + data: dict[str, str] = await request.json() log_collector.append(data) return web.Response(text="OK") @@ -993,14 +993,14 @@ async def test_middleware_uses_session_avoids_recursion_with_disabled_middleware aiohttp_server: AiohttpServer, ) -> None: """Test that middleware can avoid infinite recursion by disabling middleware.""" - log_collector: List[Dict[str, str]] = [] + log_collector: list[dict[str, str]] = [] request_count = 0 async def log_api_handler(request: web.Request) -> web.Response: """Handle log API requests.""" nonlocal request_count request_count += 1 - data: Dict[str, str] = await request.json() + data: dict[str, str] = await request.json() log_collector.append(data) return web.Response(text="OK") @@ -1061,8 +1061,8 @@ async def test_middleware_can_check_request_body( aiohttp_server: AiohttpServer, ) -> None: """Test that middleware can check request body.""" - received_bodies: List[str] = [] - received_headers: List[Dict[str, str]] = [] + received_bodies: list[str] = [] + received_headers: list[dict[str, str]] = [] async def handler(request: web.Request) -> web.Response: """Server handler that receives requests.""" diff --git a/tests/test_client_middleware_digest_auth.py b/tests/test_client_middleware_digest_auth.py index 2059bfea337..064d4d78239 100644 --- a/tests/test_client_middleware_digest_auth.py +++ b/tests/test_client_middleware_digest_auth.py @@ -2,8 +2,9 @@ import io import re +from collections.abc import Generator from hashlib import md5, sha1 -from typing import Generator, Literal, Union +from typing import Literal from unittest import mock import pytest @@ -323,7 +324,7 @@ def KD(secret: str, data: str) -> str: async def test_digest_response_exact_match( qop: str, algorithm: str, - body: Union[Literal[b""], BytesIOPayload], + body: Literal[b""] | BytesIOPayload, body_str: str, mock_sha1_digest: mock.MagicMock, ) -> None: diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 950dd93aeb6..56d35ac3433 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -5,9 +5,9 @@ import sys import urllib.parse import warnings -from collections.abc import Callable, Iterable +from collections.abc import AsyncIterator, Callable, Iterable from http.cookies import BaseCookie, Morsel, SimpleCookie -from 
typing import Any, AsyncIterator, Optional, Protocol, Union +from typing import Any, Protocol from unittest import mock import pytest @@ -1124,7 +1124,7 @@ async def gen(): original_write_bytes = req.write_bytes async def _mock_write_bytes( - writer: AbstractStreamWriter, conn: mock.Mock, content_length: Optional[int] + writer: AbstractStreamWriter, conn: mock.Mock, content_length: int | None ) -> None: # Ensure the task is scheduled await asyncio.sleep(0) @@ -1558,7 +1558,7 @@ def test_gen_default_accept_encoding( @pytest.mark.usefixtures("netrc_contents") def test_basicauth_from_netrc_present( make_request: Any, - expected_auth: Optional[helpers.BasicAuth], + expected_auth: helpers.BasicAuth | None, ): """Test appropriate Authorization header is sent when netrc is not empty.""" req = make_request("get", "http://example.com", trust_env=True) @@ -1710,7 +1710,7 @@ async def test_write_bytes_with_iterable_content_length_limit( loop: asyncio.AbstractEventLoop, buf: bytearray, conn: mock.Mock, - data: Union[list[bytes], bytes], + data: list[bytes] | bytes, ) -> None: """Test that write_bytes respects content_length limit for iterable data.""" # Test with iterable data @@ -2173,8 +2173,8 @@ async def test_expect100_with_body_becomes_none() -> None: ) def test_content_length_for_methods( method: str, - data: Optional[bytes], - expected_content_length: Optional[str], + data: bytes | None, + expected_content_length: str | None, loop: asyncio.AbstractEventLoop, ) -> None: """Test that Content-Length header is set correctly for all HTTP methods.""" diff --git a/tests/test_client_response.py b/tests/test_client_response.py index a5061e08fe1..4e7ae2fb1a6 100644 --- a/tests/test_client_response.py +++ b/tests/test_client_response.py @@ -3,8 +3,8 @@ import asyncio import gc import sys +from collections.abc import Callable from http.cookies import SimpleCookie -from typing import Callable from unittest import mock import pytest diff --git a/tests/test_client_session.py b/tests/test_client_session.py index c296c9670b0..d76e718ebb3 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -6,8 +6,9 @@ import sys import warnings from collections import deque +from collections.abc import Awaitable, Callable, Iterator from http.cookies import BaseCookie, SimpleCookie -from typing import Any, Awaitable, Callable, Iterator, List, Optional, cast +from typing import Any, cast from unittest import mock from uuid import uuid4 @@ -687,7 +688,7 @@ async def test_ws_connect_unix_socket_allowed_protocols( original_connect = session._connector.connect async def connect( - req: ClientRequest, traces: List[Trace], timeout: aiohttp.ClientTimeout + req: ClientRequest, traces: list[Trace], timeout: aiohttp.ClientTimeout ) -> Connection: conn = await original_connect(req, traces, timeout) connections.append(conn) @@ -726,13 +727,13 @@ def __init__(self) -> None: self._filter_cookies_mock = mock.Mock(return_value=BaseCookie()) self._clear_mock = mock.Mock() self._clear_domain_mock = mock.Mock() - self._items: List[Any] = [] + self._items: list[Any] = [] @property def quote_cookie(self) -> bool: return True - def clear(self, predicate: Optional[abc.ClearCookiePredicate] = None) -> None: + def clear(self, predicate: abc.ClearCookiePredicate | None = None) -> None: self._clear_mock(predicate) def clear_domain(self, domain: str) -> None: @@ -995,7 +996,7 @@ def reset_mocks() -> None: for m in mocks: m.reset_mock() - def to_trace_urls(mock_func: mock.Mock) -> List[URL]: + def to_trace_urls(mock_func: 
mock.Mock) -> list[URL]: return [call_args[0][-1].url for call_args in mock_func.call_args_list] def to_url(path: str) -> URL: diff --git a/tests/test_client_ws.py b/tests/test_client_ws.py index 48481055a7f..6f6e75dceae 100644 --- a/tests/test_client_ws.py +++ b/tests/test_client_ws.py @@ -2,7 +2,7 @@ import base64 import hashlib import os -from typing import Any, Type +from typing import Any from unittest import mock import pytest @@ -513,7 +513,7 @@ async def test_close_exc2(loop, ws_key, key_data) -> None: @pytest.mark.parametrize("exc", (ClientConnectionResetError, ConnectionResetError)) async def test_send_data_after_close( - exc: Type[Exception], + exc: type[Exception], ws_key: bytes, key_data: bytes, loop: asyncio.AbstractEventLoop, diff --git a/tests/test_client_ws_functional.py b/tests/test_client_ws_functional.py index 7b6bd032244..8c24c077e6a 100644 --- a/tests/test_client_ws_functional.py +++ b/tests/test_client_ws_functional.py @@ -1,6 +1,6 @@ import asyncio import sys -from typing import Any, List, NoReturn, Optional +from typing import Any, NoReturn from unittest import mock import pytest @@ -932,7 +932,7 @@ async def handler(request: web.Request) -> NoReturn: ping_started = loop.create_future() async def delayed_send_frame( - message: bytes, opcode: int, compress: Optional[int] = None + message: bytes, opcode: int, compress: int | None = None ) -> None: assert opcode == WSMsgType.PING nonlocal cancelled @@ -1248,7 +1248,7 @@ async def handler(request: web.Request) -> NoReturn: app = web.Application() app.router.add_route("GET", "/", handler) - sync_future: "asyncio.Future[List[aiohttp.ClientWebSocketResponse]]" = ( + sync_future: asyncio.Future[list[aiohttp.ClientWebSocketResponse]] = ( loop.create_future() ) client = await aiohttp_client(app) diff --git a/tests/test_connector.py b/tests/test_connector.py index 9048bf61e2f..09a8b26c193 100644 --- a/tests/test_connector.py +++ b/tests/test_connector.py @@ -10,19 +10,10 @@ import uuid import warnings from collections import defaultdict, deque +from collections.abc import Callable, Sequence from concurrent import futures from contextlib import closing, suppress -from typing import ( - Any, - Callable, - DefaultDict, - Deque, - List, - Literal, - Optional, - Sequence, - Tuple, -) +from typing import Any, Literal from unittest import mock import pytest @@ -771,7 +762,7 @@ async def _resolve_host(host, port, traces=None): connected = False async def start_connection(*args, **kwargs): - addr_infos: List[AddrInfoType] = kwargs["addr_infos"] + addr_infos: list[AddrInfoType] = kwargs["addr_infos"] first_addr_info = addr_infos[0] first_addr_info_addr = first_addr_info[-1] @@ -899,7 +890,7 @@ def get_extra_info(param): [0.1, 0.25, None], ) async def test_tcp_connector_happy_eyeballs( - loop: Any, happy_eyeballs_delay: Optional[float] + loop: Any, happy_eyeballs_delay: float | None ) -> None: conn = aiohttp.TCPConnector(happy_eyeballs_delay=happy_eyeballs_delay) @@ -1005,7 +996,7 @@ async def _resolve_host(host, port, traces=None): async def start_connection(*args, **kwargs): nonlocal interleave - addr_infos: List[AddrInfoType] = kwargs["addr_infos"] + addr_infos: list[AddrInfoType] = kwargs["addr_infos"] interleave = kwargs["interleave"] # Mock the 4th host connecting successfully fourth_addr_info = addr_infos[3] @@ -1147,7 +1138,7 @@ async def test_tcp_connector_multiple_hosts_one_timeout( async def _resolve_host( host: str, port: int, traces: object = None - ) -> List[ResolveResult]: + ) -> list[ResolveResult]: return [ { 
"hostname": host, @@ -1163,7 +1154,7 @@ async def _resolve_host( async def start_connection( addr_infos: Sequence[AddrInfoType], *, - interleave: Optional[int] = None, + interleave: int | None = None, **kwargs: object, ) -> socket.socket: nonlocal timeout_error @@ -1188,8 +1179,8 @@ async def start_connection( assert False async def create_connection( - *args: object, sock: Optional[socket.socket] = None, **kwargs: object - ) -> Tuple[ResponseHandler, ResponseHandler]: + *args: object, sock: socket.socket | None = None, **kwargs: object + ) -> tuple[ResponseHandler, ResponseHandler]: nonlocal connected assert isinstance(sock, socket.socket) @@ -1897,7 +1888,7 @@ async def test_cleanup(key: ConnectionKey) -> None: m2 = mock.Mock() m1.is_connected.return_value = True m2.is_connected.return_value = False - testset: DefaultDict[ConnectionKey, Deque[Tuple[ResponseHandler, float]]] = ( + testset: defaultdict[ConnectionKey, deque[tuple[ResponseHandler, float]]] = ( defaultdict(deque) ) testset[key] = deque([(m1, 10), (m2, 300)]) @@ -1922,7 +1913,7 @@ async def test_cleanup_close_ssl_transport( ) -> None: proto = create_mocked_conn(loop) transport = proto.transport - testset: DefaultDict[ConnectionKey, Deque[Tuple[ResponseHandler, float]]] = ( + testset: defaultdict[ConnectionKey, deque[tuple[ResponseHandler, float]]] = ( defaultdict(deque) ) testset[ssl_key] = deque([(proto, 10)]) @@ -1948,7 +1939,7 @@ async def test_cleanup_close_ssl_transport( async def test_cleanup2(loop: asyncio.AbstractEventLoop, key: ConnectionKey) -> None: m = create_mocked_conn() m.is_connected.return_value = True - testset: DefaultDict[ConnectionKey, Deque[Tuple[ResponseHandler, float]]] = ( + testset: defaultdict[ConnectionKey, deque[tuple[ResponseHandler, float]]] = ( defaultdict(deque) ) testset[key] = deque([(m, 300)]) @@ -1969,7 +1960,7 @@ async def test_cleanup2(loop: asyncio.AbstractEventLoop, key: ConnectionKey) -> async def test_cleanup3(loop: asyncio.AbstractEventLoop, key: ConnectionKey) -> None: m = create_mocked_conn(loop) m.is_connected.return_value = True - testset: DefaultDict[ConnectionKey, Deque[Tuple[ResponseHandler, float]]] = ( + testset: defaultdict[ConnectionKey, deque[tuple[ResponseHandler, float]]] = ( defaultdict(deque) ) testset[key] = deque([(m, 290.1), (create_mocked_conn(loop), 305.1)]) @@ -2722,7 +2713,7 @@ async def test_multiple_dns_resolution_requests_success( ) -> None: """Verify that multiple DNS resolution requests are handled correctly.""" - async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: + async def delay_resolve(*args: object, **kwargs: object) -> list[ResolveResult]: """Delayed resolve() task.""" for _ in range(3): await asyncio.sleep(0) @@ -2784,7 +2775,7 @@ async def test_multiple_dns_resolution_requests_failure( ) -> None: """Verify that DNS resolution failure for multiple requests is handled correctly.""" - async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: + async def delay_resolve(*args: object, **kwargs: object) -> list[ResolveResult]: """Delayed resolve() task.""" for _ in range(3): await asyncio.sleep(0) @@ -2837,7 +2828,7 @@ async def test_multiple_dns_resolution_requests_cancelled( ) -> None: """Verify that DNS resolution cancellation does not affect other tasks.""" - async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: + async def delay_resolve(*args: object, **kwargs: object) -> list[ResolveResult]: """Delayed resolve() task.""" for _ in range(3): await asyncio.sleep(0) @@ 
-2889,7 +2880,7 @@ async def test_multiple_dns_resolution_requests_first_cancelled( ) -> None: """Verify that first DNS resolution cancellation does not make other resolutions fail.""" - async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: + async def delay_resolve(*args: object, **kwargs: object) -> list[ResolveResult]: """Delayed resolve() task.""" for _ in range(3): await asyncio.sleep(0) @@ -2953,7 +2944,7 @@ async def test_multiple_dns_resolution_requests_first_fails_second_successful( """Verify that first DNS resolution fails the first time and is successful the second time.""" attempt = 0 - async def delay_resolve(*args: object, **kwargs: object) -> List[ResolveResult]: + async def delay_resolve(*args: object, **kwargs: object) -> list[ResolveResult]: """Delayed resolve() task.""" nonlocal attempt for _ in range(3): @@ -4077,7 +4068,7 @@ async def send_dns_cache_miss(self, *args: object, **kwargs: object) -> None: if request_count <= 1: raise Exception("first attempt") - async def resolve_response() -> List[ResolveResult]: + async def resolve_response() -> list[ResolveResult]: await asyncio.sleep(0) return [token] diff --git a/tests/test_cookiejar.py b/tests/test_cookiejar.py index 17e27e8f7ae..dc2c9f4cc98 100644 --- a/tests/test_cookiejar.py +++ b/tests/test_cookiejar.py @@ -9,7 +9,6 @@ import unittest from http.cookies import BaseCookie, Morsel, SimpleCookie from operator import not_ -from typing import List, Set from unittest import mock import pytest @@ -1260,12 +1259,12 @@ async def test_update_cookies_from_headers_duplicate_names() -> None: assert len(jar) == 3 # Verify we have both session-id cookies - all_cookies: List[Morsel[str]] = list(jar) - session_ids: List[Morsel[str]] = [c for c in all_cookies if c.key == "session-id"] + all_cookies: list[Morsel[str]] = list(jar) + session_ids: list[Morsel[str]] = [c for c in all_cookies if c.key == "session-id"] assert len(session_ids) == 2 # Check their domains are different - domains: Set[str] = {c["domain"] for c in session_ids} + domains: set[str] = {c["domain"] for c in session_ids} assert domains == {"example.com", "www.example.com"} diff --git a/tests/test_helpers.py b/tests/test_helpers.py index a343cbdfedf..5693d0b96b2 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -558,8 +558,8 @@ def test_proxies_from_env_skipped(caplog, url_input, expected_scheme) -> None: url = URL(url_input) assert helpers.proxies_from_env() == {} assert len(caplog.records) == 1 - log_message = "{proto!s} proxies {url!s} are not supported, ignoring".format( - proto=expected_scheme.upper(), url=url + log_message = ( + f"{expected_scheme.upper()!s} proxies {url!s} are not supported, ignoring" ) assert caplog.record_tuples == [("aiohttp.client", 30, log_message)] diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py index ec51a46b89c..680b8404d4f 100644 --- a/tests/test_http_parser.py +++ b/tests/test_http_parser.py @@ -4,7 +4,7 @@ import re import sys from contextlib import nullcontext -from typing import Any, Dict, List +from typing import Any from unittest import mock from urllib.parse import quote @@ -59,7 +59,7 @@ def protocol(): return mock.Mock() -def _gen_ids(parsers: List[Any]) -> List[str]: +def _gen_ids(parsers: list[Any]) -> list[str]: return [ "py-parser" if parser.__module__ == "aiohttp.http_parser" else "c-parser" for parser in parsers @@ -630,7 +630,7 @@ def test_headers_content_length_err_2(parser) -> None: parser.feed_data(text) -_pad: Dict[bytes, str] = { +_pad: dict[bytes, 
str] = { b"": "empty", # not a typo. Python likes triple zero b"\000": "NUL", @@ -783,7 +783,7 @@ def test_http_request_bad_status_line(parser) -> None: assert r"\n" not in exc_info.value.message -_num: Dict[bytes, str] = { +_num: dict[bytes, str] = { # dangerous: accepted by Python int() # unicodedata.category("\U0001D7D9") == 'Nd' "\N{mathematical double-struck digit one}".encode(): "utf8digit", diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py index ffd20a0d677..c01efe2f909 100644 --- a/tests/test_http_writer.py +++ b/tests/test_http_writer.py @@ -2,7 +2,7 @@ import array import asyncio import zlib -from typing import Generator, Iterable, Union +from collections.abc import Generator, Iterable from unittest import mock import pytest @@ -66,7 +66,7 @@ def decompress(data: bytes) -> bytes: return d.decompress(data) -def decode_chunked(chunked: Union[bytes, bytearray]) -> bytes: +def decode_chunked(chunked: bytes | bytearray) -> bytes: i = 0 out = b"" while i < len(chunked): diff --git a/tests/test_multipart.py b/tests/test_multipart.py index 75b73a78070..3380a811651 100644 --- a/tests/test_multipart.py +++ b/tests/test_multipart.py @@ -2,7 +2,6 @@ import io import json import pathlib -import sys from unittest import mock import pytest @@ -784,7 +783,6 @@ async def test_invalid_boundary(self) -> None: with pytest.raises(ValueError): await reader.next() - @pytest.mark.skipif(sys.version_info < (3, 10), reason="Needs anext()") async def test_read_boundary_across_chunks(self) -> None: class SplitBoundaryStream: def __init__(self) -> None: diff --git a/tests/test_payload.py b/tests/test_payload.py index 9aa97b20da0..9f79440ef87 100644 --- a/tests/test_payload.py +++ b/tests/test_payload.py @@ -6,7 +6,7 @@ from collections.abc import AsyncIterator from io import StringIO from pathlib import Path -from typing import List, Optional, TextIO, Union +from typing import TextIO, Union import pytest from multidict import CIMultiDict @@ -34,7 +34,7 @@ async def drain(self) -> None: """No-op for test writer.""" def enable_compression( - self, encoding: str = "deflate", strategy: Optional[int] = None + self, encoding: str = "deflate", strategy: int | None = None ) -> None: """Compression not implemented for test writer.""" @@ -180,14 +180,14 @@ async def write( """Store the chunk in the written list.""" self.written.append(bytes(chunk)) - async def write_eof(self, chunk: Optional[bytes] = None) -> None: + async def write_eof(self, chunk: bytes | None = None) -> None: """write_eof implementation - no-op for tests.""" async def drain(self) -> None: """Drain implementation - no-op for tests.""" def enable_compression( - self, encoding: str = "deflate", strategy: Optional[int] = None + self, encoding: str = "deflate", strategy: int | None = None ) -> None: """Enable compression - no-op for tests.""" @@ -312,7 +312,7 @@ async def test_bytesio_payload_write_with_length_remaining_zero() -> None: original_read = bio.read read_calls = 0 - def mock_read(size: Optional[int] = None) -> bytes: + def mock_read(size: int | None = None) -> bytes: nonlocal read_calls read_calls += 1 if read_calls == 1: @@ -416,9 +416,9 @@ async def test_iobase_payload_large_content_length() -> None: class TrackingBytesIO(io.BytesIO): def __init__(self, data: bytes) -> None: super().__init__(data) - self.read_sizes: List[int] = [] + self.read_sizes: list[int] = [] - def read(self, size: Optional[int] = -1) -> bytes: + def read(self, size: int | None = -1) -> bytes: self.read_sizes.append(size if size is not None else -1) 
return super().read(size) @@ -491,9 +491,9 @@ async def test_textio_payload_large_content_length() -> None: class TrackingStringIO(io.StringIO): def __init__(self, data: str) -> None: super().__init__(data) - self.read_sizes: List[int] = [] + self.read_sizes: list[int] = [] - def read(self, size: Optional[int] = -1) -> str: + def read(self, size: int | None = -1) -> str: self.read_sizes.append(size if size is not None else -1) return super().read(size) diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py index f4bc020d1f0..f1ae83ee7c6 100644 --- a/tests/test_proxy_functional.py +++ b/tests/test_proxy_functional.py @@ -4,9 +4,9 @@ import platform import ssl import sys +from collections.abc import Awaitable, Callable from contextlib import suppress from re import match as match_regex -from typing import Awaitable, Callable from unittest import mock from uuid import uuid4 @@ -760,10 +760,7 @@ async def test_proxy_from_env_http_with_auth_from_netrc( proxy = await proxy_test_server() auth = aiohttp.BasicAuth("user", "pass") netrc_file = tmp_path / "test_netrc" - netrc_file_data = "machine 127.0.0.1 login {} password {}".format( - auth.login, - auth.password, - ) + netrc_file_data = f"machine 127.0.0.1 login {auth.login} password {auth.password}" with netrc_file.open("w") as f: f.write(netrc_file_data) mocker.patch.dict( @@ -786,10 +783,7 @@ async def test_proxy_from_env_http_without_auth_from_netrc( proxy = await proxy_test_server() auth = aiohttp.BasicAuth("user", "pass") netrc_file = tmp_path / "test_netrc" - netrc_file_data = "machine 127.0.0.2 login {} password {}".format( - auth.login, - auth.password, - ) + netrc_file_data = f"machine 127.0.0.2 login {auth.login} password {auth.password}" with netrc_file.open("w") as f: f.write(netrc_file_data) mocker.patch.dict( diff --git a/tests/test_resolver.py b/tests/test_resolver.py index 1866939ba6b..02a222d0d82 100644 --- a/tests/test_resolver.py +++ b/tests/test_resolver.py @@ -2,9 +2,9 @@ import gc import ipaddress import socket -from collections.abc import Generator +from collections.abc import Awaitable, Callable, Collection, Generator from ipaddress import ip_address -from typing import Any, Awaitable, Callable, Collection, List, NamedTuple, Tuple, Union +from typing import Any, NamedTuple from unittest.mock import Mock, create_autospec, patch import pytest @@ -71,7 +71,7 @@ def dns_resolver_manager() -> Generator[_DNSResolverManager, None, None]: class FakeAIODNSAddrInfoNode(NamedTuple): family: int - addr: Union[Tuple[bytes, int], Tuple[bytes, int, int, int]] + addr: tuple[bytes, int] | tuple[bytes, int, int, int] class FakeAIODNSAddrInfoIPv4Result: @@ -126,7 +126,7 @@ async def fake_query_result(result): def fake_addrinfo(hosts: Collection[str]) -> Callable[..., Awaitable[Any]]: - async def fake(*args: Any, **kwargs: Any) -> List[Any]: + async def fake(*args: Any, **kwargs: Any) -> list[Any]: if not hosts: raise socket.gaierror @@ -136,7 +136,7 @@ async def fake(*args: Any, **kwargs: Any) -> List[Any]: def fake_ipv6_addrinfo(hosts: Collection[str]) -> Callable[..., Awaitable[Any]]: - async def fake(*args: Any, **kwargs: Any) -> List[Any]: + async def fake(*args: Any, **kwargs: Any) -> list[Any]: if not hosts: raise socket.gaierror @@ -155,7 +155,7 @@ async def fake(*args: Any, **kwargs: Any) -> List[Any]: def fake_ipv6_nameinfo(host: str) -> Callable[..., Awaitable[Any]]: - async def fake(*args: Any, **kwargs: Any) -> Tuple[str, int]: + async def fake(*args: Any, **kwargs: Any) -> tuple[str, int]: return host, 0 
return fake @@ -306,7 +306,7 @@ async def test_threaded_negative_lookup() -> None: async def test_threaded_negative_ipv6_lookup() -> None: loop = Mock() - ips: List[Any] = [] + ips: list[Any] = [] loop.getaddrinfo = fake_ipv6_addrinfo(ips) resolver = ThreadedResolver() resolver._loop = loop @@ -319,7 +319,7 @@ async def test_threaded_negative_lookup_with_unknown_result() -> None: # If compile CPython with `--disable-ipv6` option, # we will get an (int, bytes) tuple, instead of a Exception. - async def unknown_addrinfo(*args: Any, **kwargs: Any) -> List[Any]: + async def unknown_addrinfo(*args: Any, **kwargs: Any) -> list[Any]: return [ ( socket.AF_INET6, diff --git a/tests/test_run_app.py b/tests/test_run_app.py index eeb28d6ff41..206eafde6b0 100644 --- a/tests/test_run_app.py +++ b/tests/test_run_app.py @@ -9,17 +9,8 @@ import subprocess import sys import time -from typing import ( - AsyncIterator, - Awaitable, - Callable, - Coroutine, - Iterator, - NoReturn, - Optional, - Set, - Tuple, -) +from collections.abc import AsyncIterator, Awaitable, Callable, Coroutine, Iterator +from typing import NoReturn from unittest import mock from uuid import uuid4 @@ -973,8 +964,8 @@ def run_app( sock: socket.socket, timeout: int, task: Callable[[], Coroutine[None, None, None]], - extra_test: Optional[Callable[[ClientSession], Awaitable[None]]] = None, - ) -> Tuple["asyncio.Task[None]", int]: + extra_test: Callable[[ClientSession], Awaitable[None]] | None = None, + ) -> tuple["asyncio.Task[None]", int]: num_connections = -1 t = test_task = None port = sock.getsockname()[1] @@ -1203,7 +1194,7 @@ async def run_test(app: web.Application) -> None: def test_shutdown_close_websockets(self, unused_port_socket: socket.socket) -> None: sock = unused_port_socket port = sock.getsockname()[1] - WS = web.AppKey("ws", Set[web.WebSocketResponse]) + WS = web.AppKey("ws", set[web.WebSocketResponse]) client_finished = server_finished = False async def ws_handler(request: web.Request) -> web.WebSocketResponse: diff --git a/tests/test_test_utils.py b/tests/test_test_utils.py index a4866c73f3e..9056ba79e00 100644 --- a/tests/test_test_utils.py +++ b/tests/test_test_utils.py @@ -1,8 +1,9 @@ import asyncio import gzip import sys +from collections.abc import Iterator from socket import socket -from typing import Any, Iterator +from typing import Any from unittest import mock import pytest diff --git a/tests/test_web_app.py b/tests/test_web_app.py index 69655b1a49a..3905f4a0f0b 100644 --- a/tests/test_web_app.py +++ b/tests/test_web_app.py @@ -1,6 +1,8 @@ import asyncio import gc -from typing import AsyncIterator, Callable, Iterator, NoReturn, Type +import sys +from collections.abc import AsyncIterator, Callable, Iterator +from typing import NoReturn from unittest import mock import pytest @@ -278,20 +280,34 @@ def test_appkey_repr_concrete() -> None: def test_appkey_repr_nonconcrete() -> None: key = web.AppKey("key", Iterator[int]) - assert repr(key) in ( - # pytest-xdist: - "", - "", - ) + if sys.version_info < (3, 11): + assert repr(key) in ( + # pytest-xdist: + "", + "", + ) + else: + assert repr(key) in ( + # pytest-xdist: + "", + "", + ) def test_appkey_repr_annotated() -> None: key = web.AppKey[Iterator[int]]("key") - assert repr(key) in ( - # pytest-xdist: - "", - "", - ) + if sys.version_info < (3, 11): + assert repr(key) in ( + # pytest-xdist: + "", + "", + ) + else: + assert repr(key) in ( + # pytest-xdist: + "", + "", + ) def test_app_str_keys() -> None: @@ -491,7 +507,7 @@ async def fail_ctx(app: web.Application) 
-> AsyncIterator[NoReturn]: @pytest.mark.parametrize("exc_cls", (Exception, asyncio.CancelledError)) async def test_cleanup_ctx_exception_on_cleanup_multiple( - exc_cls: Type[BaseException], + exc_cls: type[BaseException], ) -> None: app = web.Application() out = [] diff --git a/tests/test_web_exceptions.py b/tests/test_web_exceptions.py index 3358a947d3d..de294d188ff 100644 --- a/tests/test_web_exceptions.py +++ b/tests/test_web_exceptions.py @@ -1,5 +1,6 @@ import collections import re +import sys from traceback import format_exception from unittest import mock @@ -50,6 +51,7 @@ async def write_headers(status_line, headers): return req +@pytest.mark.skipif(sys.version_info < (3, 11), reason="Breaks on Iterable") def test_all_http_exceptions_exported() -> None: assert "HTTPException" in web.__all__ for name in dir(web): @@ -78,6 +80,7 @@ async def test_HTTPOk(buf, http_request) -> None: ) +@pytest.mark.skipif(sys.version_info < (3, 11), reason="Breaks on Iterable") def test_terminal_classes_has_status_code() -> None: terminals = set() for name in dir(web): diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py index e0f123def0d..d3b139fce85 100644 --- a/tests/test_web_functional.py +++ b/tests/test_web_functional.py @@ -4,7 +4,8 @@ import pathlib import socket import sys -from typing import Any, Dict, Generator, NoReturn, Optional, Tuple +from collections.abc import Generator +from typing import Any, NoReturn from unittest import mock import pytest @@ -634,7 +635,7 @@ async def test_expect_handler_custom_response(aiohttp_client) -> None: async def handler(request: web.Request) -> web.Response: return web.Response(text="handler") - async def expect_handler(request: web.Request) -> Optional[web.Response]: + async def expect_handler(request: web.Request) -> web.Response | None: k = request.headers.get("X-Key") cached_value = cache.get(k) if cached_value: @@ -1137,11 +1138,11 @@ async def handler(request): def compressor_case( request: pytest.FixtureRequest, parametrize_zlib_backend: None, -) -> Generator[Tuple[ZLibCompressObjProtocol, str], None, None]: +) -> Generator[tuple[ZLibCompressObjProtocol, str], None, None]: encoding: str = request.param max_wbits: int = ZLibBackend.MAX_WBITS - encoding_to_wbits: Dict[str, int] = { + encoding_to_wbits: dict[str, int] = { "deflate": max_wbits, "deflate-raw": -max_wbits, "gzip": 16 + max_wbits, @@ -1153,7 +1154,7 @@ def compressor_case( async def test_response_with_precompressed_body( aiohttp_client: AiohttpClient, - compressor_case: Tuple[ZLibCompressObjProtocol, str], + compressor_case: tuple[ZLibCompressObjProtocol, str], ) -> None: compressor, encoding = compressor_case diff --git a/tests/test_web_response.py b/tests/test_web_response.py index c07bf671d8c..609791b5b2b 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -4,8 +4,8 @@ import io import json import sys +from collections.abc import AsyncIterator from concurrent.futures import ThreadPoolExecutor -from typing import AsyncIterator, Optional from unittest import mock import aiosignal @@ -1254,7 +1254,7 @@ def read(self, size: int = -1) -> bytes: ), ), ) -def test_payload_body_get_text(payload, expected: Optional[str]) -> None: +def test_payload_body_get_text(payload, expected: str | None) -> None: resp = Response(body=payload) if expected is None: with pytest.raises(TypeError): diff --git a/tests/test_web_sendfile_functional.py b/tests/test_web_sendfile_functional.py index 0325a4658e2..36d4d54032f 100644 --- a/tests/test_web_sendfile_functional.py 
+++ b/tests/test_web_sendfile_functional.py @@ -3,7 +3,8 @@ import gzip import pathlib import socket -from typing import Any, Iterable, Optional +from collections.abc import Iterable +from typing import Any from unittest import mock import pytest @@ -308,7 +309,7 @@ async def test_static_file_with_encoding_and_enable_compression( sender: Any, accept_encoding: str, expect_encoding: str, - forced_compression: Optional[web.ContentCoding], + forced_compression: web.ContentCoding | None, ): """Test that enable_compression does not double compress when an encoded file is also present.""" @@ -706,18 +707,18 @@ async def handler(request): ) assert len(responses) == 3 assert responses[0].status == 206, "failed 'bytes=0-999': %s" % responses[0].reason - assert responses[0].headers["Content-Range"] == "bytes 0-999/{}".format( - filesize + assert ( + responses[0].headers["Content-Range"] == f"bytes 0-999/{filesize}" ), "failed: Content-Range Error" assert responses[1].status == 206, ( "failed 'bytes=1000-1999': %s" % responses[1].reason ) - assert responses[1].headers["Content-Range"] == "bytes 1000-1999/{}".format( - filesize + assert ( + responses[1].headers["Content-Range"] == f"bytes 1000-1999/{filesize}" ), "failed: Content-Range Error" assert responses[2].status == 206, "failed 'bytes=2000-': %s" % responses[2].reason - assert responses[2].headers["Content-Range"] == "bytes 2000-{}/{}".format( - filesize - 1, filesize + assert ( + responses[2].headers["Content-Range"] == f"bytes 2000-{filesize - 1}/{filesize}" ), "failed: Content-Range Error" body = await asyncio.gather( diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index ee60b6917c5..7084d821899 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -4,8 +4,9 @@ import pathlib import socket import sys +from collections.abc import Generator from stat import S_IFIFO, S_IMODE -from typing import Any, Generator, NoReturn, Optional +from typing import Any, NoReturn import pytest import yarl @@ -57,7 +58,7 @@ async def test_access_root_of_static_handler( status: int, prefix: str, request_path: str, - data: Optional[bytes], + data: bytes | None, ) -> None: # Tests the operation of static file server. # Try to access the root of static file server, and make @@ -143,7 +144,7 @@ async def test_access_root_of_static_handler_xss( status: int, prefix: str, request_path: str, - data: Optional[bytes], + data: bytes | None, ) -> None: # Tests the operation of static file server. 
# Try to access the root of static file server, and make diff --git a/tests/test_web_websocket_functional.py b/tests/test_web_websocket_functional.py index f7f5c31356c..c071b5839ed 100644 --- a/tests/test_web_websocket_functional.py +++ b/tests/test_web_websocket_functional.py @@ -4,7 +4,7 @@ import contextlib import sys import weakref -from typing import Any, NoReturn, Optional +from typing import Any, NoReturn from unittest import mock import pytest @@ -358,7 +358,7 @@ async def handler(request): async def test_close_op_code_from_client(loop: Any, aiohttp_client: Any) -> None: - srv_ws: Optional[web.WebSocketResponse] = None + srv_ws: web.WebSocketResponse | None = None async def handler(request): nonlocal srv_ws @@ -1220,7 +1220,7 @@ async def test_abnormal_closure_when_client_does_not_close( aiohttp_client: AiohttpClient, ) -> None: """Test abnormal closure when the server closes and the client doesn't respond.""" - close_code: Optional[WSCloseCode] = None + close_code: WSCloseCode | None = None async def handler(request: web.Request) -> web.WebSocketResponse: # Setting a short close timeout @@ -1247,7 +1247,7 @@ async def test_normal_closure_while_client_sends_msg( aiohttp_client: AiohttpClient, ) -> None: """Test normal closure when the server closes and the client responds properly.""" - close_code: Optional[WSCloseCode] = None + close_code: WSCloseCode | None = None got_close_code = asyncio.Event() async def handler(request: web.Request) -> web.WebSocketResponse: diff --git a/tests/test_websocket_parser.py b/tests/test_websocket_parser.py index 37e15b64c18..abf4e0ad726 100644 --- a/tests/test_websocket_parser.py +++ b/tests/test_websocket_parser.py @@ -2,7 +2,6 @@ import pickle import random import struct -from typing import Union from unittest import mock import pytest @@ -29,14 +28,14 @@ class PatchableWebSocketReader(WebSocketReader): def parse_frame( self, data: bytes - ) -> list[tuple[bool, int, Union[bytes, bytearray], int]]: + ) -> list[tuple[bool, int, bytes | bytearray, int]]: # This method is overridden to allow for patching in tests. - frames: list[tuple[bool, int, Union[bytes, bytearray], int]] = [] + frames: list[tuple[bool, int, bytes | bytearray, int]] = [] def _handle_frame( fin: bool, opcode: int, - payload: Union[bytes, bytearray], + payload: bytes | bytearray, compressed: int, ) -> None: # This method is overridden to allow for patching in tests. 
@@ -259,7 +258,7 @@ def test_parse_frame_header_payload_size( def test_ping_frame( out: WebSocketDataQueue, parser: PatchableWebSocketReader, - data: Union[bytes, bytearray, memoryview], + data: bytes | bytearray | memoryview, ) -> None: parser._handle_frame(True, WSMsgType.PING, b"data", 0) res = out._buffer[0] diff --git a/tests/test_websocket_writer.py b/tests/test_websocket_writer.py index a985acfb7f6..34d8b55c16f 100644 --- a/tests/test_websocket_writer.py +++ b/tests/test_websocket_writer.py @@ -1,6 +1,7 @@ import asyncio import random -from typing import Any, Callable +from collections.abc import Callable +from typing import Any from unittest import mock import pytest diff --git a/tests/test_worker.py b/tests/test_worker.py index 60d1e8b088b..94eb8967bd4 100644 --- a/tests/test_worker.py +++ b/tests/test_worker.py @@ -3,7 +3,7 @@ import os import socket import ssl -from typing import TYPE_CHECKING, Dict, Optional +from typing import TYPE_CHECKING from unittest import mock import pytest @@ -29,9 +29,9 @@ class BaseTestWorker: def __init__(self) -> None: - self.servers: Dict[object, object] = {} + self.servers: dict[object, object] = {} self.exit_code = 0 - self._notify_waiter: Optional[asyncio.Future[bool]] = None + self._notify_waiter: asyncio.Future[bool] | None = None self.cfg = mock.Mock() self.cfg.graceful_timeout = 100 self.pid = "pid" From 54df652857a96dd69a54a1a1ae4fb9b0b21dda75 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 7 Oct 2025 18:00:39 +0000 Subject: [PATCH 08/51] Bump click from 8.1.8 to 8.3.0 (#11597) Bumps [click](https://github.com/pallets/click) from 8.1.8 to 8.3.0.
Release notes

Sourced from click's releases.

8.3.0

This is the Click 8.3.0 feature release. A feature release may include new features, remove previously deprecated code, add new deprecations, or introduce potentially breaking changes.

We encourage everyone to upgrade. You can read more about our Version Support Policy on our website.

PyPI: https://pypi.org/project/click/8.3.0/ Changes: https://click.palletsprojects.com/page/changes/#version-8-3-0 Milestone: https://github.com/pallets/click/milestone/27

  • Improved flag option handling: Reworked the relationship between flag_value and default parameters for better consistency (a short sketch follows this list):

    • The default parameter value is now preserved as-is and passed directly to CLI functions (no more unexpected transformations)
    • Exception: flag options with default=True maintain backward compatibility by defaulting to their flag_value
    • The default parameter can now be any type (bool, None, etc.)
    • Fixes inconsistencies reported in: #1992 #2514 #2610 #3024 #3030
  • Allow default to be set on Argument for nargs = -1. #2164 #3030

  • Show correct auto complete value for nargs option in combination with flag option #2813

  • Fix handling of quoted and escaped parameters in Fish autocompletion. #2995 #3013

  • Lazily import shutil. #3023

  • Properly forward exception information to resources registered with click.core.Context.with_resource(). #2447 #3058

  • Fix regression related to EOF handling in CliRunner. #2939 #2940
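
As an illustration of the reworked flag_value/default behavior described above, here is a minimal sketch; the command and option names are invented for the example and are not from the release:

    import click

    @click.command()
    @click.option("--upper", "transform", flag_value="upper")
    @click.option("--lower", "transform", flag_value="lower", default=True)
    def cli(transform: str) -> None:
        # With neither flag passed, default=True keeps the backward-compatible
        # exception: transform falls back to this option's flag_value, "lower".
        # Any other default (None, a plain string, ...) is now passed through
        # to the decorated function as-is rather than being transformed.
        click.echo(transform)

    if __name__ == "__main__":
        cli()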

8.2.2

This is the Click 8.2.2 fix release, which fixes bugs but does not otherwise change behavior and should not result in breaking changes compared to the latest feature release.

PyPI: https://pypi.org/project/click/8.2.2/ Changes: https://click.palletsprojects.com/page/changes/#version-8-2-2 Milestone: https://github.com/pallets/click/milestone/25

  • Fix reconciliation of default, flag_value and type parameters for flag options, as well as parsing and normalization of environment variables. #2952 #2956
  • Fix typing issue in BadParameter and MissingParameter exceptions for the parameter param_hint that did not allow for a sequence of strings where the underlying function _join_param_hints allows for it. #2777 #2990
  • Use the value of Enum choices to render their default value in help screen. #2911 #3004
  • Fix completion for the Z shell (zsh) for completion items containing colons. #2703 #2846
  • Don't include envvar in error hint when not configured. #2971 #2972

... (truncated)

Changelog

Sourced from click's changelog.

Version 8.3.0

Released 2025-09-17

  • Improved flag option handling: Reworked the relationship between flag_value and default parameters for better consistency:

    • The default parameter value is now preserved as-is and passed directly to CLI functions (no more unexpected transformations)
    • Exception: flag options with default=True maintain backward compatibility by defaulting to their flag_value
    • The default parameter can now be any type (bool, None, etc.)
    • Fixes inconsistencies reported in: :issue:1992 :issue:2514 :issue:2610 :issue:3024 :pr:3030
  • Allow default to be set on Argument for nargs = -1. :issue:2164 :pr:3030

  • Show correct auto complete value for nargs option in combination with flag option :issue:2813

  • Fix handling of quoted and escaped parameters in Fish autocompletion. :issue:2995 :pr:3013

  • Lazily import shutil. :pr:3023

  • Properly forward exception information to resources registered with click.core.Context.with_resource(). :issue:2447 :pr:3058

  • Fix regression related to EOF handling in CliRunner. :issue:2939 :pr:2940

Version 8.2.2

Released 2025-07-31

  • Fix reconciliation of default, flag_value and type parameters for flag options, as well as parsing and normalization of environment variables. :issue:2952 :pr:2956
  • Fix typing issue in BadParameter and MissingParameter exceptions for the parameter param_hint that did not allow for a sequence of strings where the underlying function _join_param_hints allows for it. :issue:2777 :pr:2990
  • Use the value of Enum choices to render their default value in help screen. Refs :issue:2911 :pr:3004
  • Fix completion for the Z shell (zsh) for completion items containing colons. :issue:2703 :pr:2846
  • Don't include envvar in error hint when not configured. :issue:2971 :pr:2972
  • Fix a rare race in click.testing.StreamMixer's finalization that manifested as a ValueError on close in a multi-threaded test session. :issue:2993 :pr:2991

Version 8.2.1

Released 2025-05-20

... (truncated)

Commits
  • 00fadb8 Release version 8.3.0
  • 2a0e3ba testing/CliRunner: Fix regression related to EOF introduced in 262bdf0 (#2940)
  • e11a1ef Merge branch 'main' into fix-cli-runner-prompt-eof-handling
  • 36deba8 Forward exception information to resources registered in a context (#3058)
  • f2cae7a #2447 Add summary of PR to changelog for 8.3.x
  • 7c7ec36 #2447 Split resource exception handling tests in single and nested
  • 92129c5 #2447 Added exception forwarding to context tests
  • 555fa9b #2447 Forward exception data to exit stack when calling __exit__
  • 16fe802 Add more tests on Enum rendering (#3053)
  • d36de6f Add more tests on Enum rendering their item's names and not values
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=click&package-manager=pip&previous-version=8.1.8&new-version=8.3.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- requirements/lint.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 8 files changed, 8 insertions(+), 8 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 3b49dec2d1c..52e9fb7b6cc 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -47,7 +47,7 @@ charset-normalizer==3.4.3 # via requests cherry-picker==2.6.0 # via -r requirements/dev.in -click==8.1.8 +click==8.3.0 # via # cherry-picker # pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index 397ba176293..be221bc5ee9 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -47,7 +47,7 @@ charset-normalizer==3.4.3 # via requests cherry-picker==2.6.0 # via -r requirements/dev.in -click==8.1.8 +click==8.3.0 # via # cherry-picker # pip-tools diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index bf5ab08f083..5515f32cfd4 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -14,7 +14,7 @@ certifi==2025.10.5 # via requests charset-normalizer==3.4.3 # via requests -click==8.1.8 +click==8.3.0 # via towncrier docutils==0.21.2 # via sphinx diff --git a/requirements/doc.txt b/requirements/doc.txt index 7aa95aee161..1bb127cf02c 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -14,7 +14,7 @@ certifi==2025.10.5 # via requests charset-normalizer==3.4.3 # via requests -click==8.1.8 +click==8.3.0 # via towncrier docutils==0.21.2 # via sphinx diff --git a/requirements/lint.txt b/requirements/lint.txt index 31d20fd8f81..cd6df0e8c7c 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -19,7 +19,7 @@ cffi==2.0.0 # pytest-codspeed cfgv==3.4.0 # via pre-commit -click==8.1.8 +click==8.3.0 # via slotscheck cryptography==46.0.2 # via trustme diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 8e073988a37..eb859e6a0ca 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -12,7 +12,7 @@ cffi==2.0.0 # via # cryptography # pytest-codspeed -click==8.2.1 +click==8.3.0 # via wait-for-it coverage==7.10.7 # via diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 59cb7ba6341..79e091033b4 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -25,7 +25,7 @@ cffi==2.0.0 # cryptography # pycares # pytest-codspeed -click==8.2.1 +click==8.3.0 # via wait-for-it coverage==7.10.7 # via diff --git a/requirements/test.txt b/requirements/test.txt index c1cccbba540..2eb2e79fef3 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -25,7 +25,7 @@ cffi==2.0.0 # cryptography # pycares # pytest-codspeed -click==8.1.8 +click==8.3.0 # via wait-for-it coverage==7.10.7 # via From 7057f1a456cc53498e795351f6c6568369c4f00e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 7 Oct 2025 18:17:48 +0000 Subject: [PATCH 09/51] Bump pytest-codspeed from 4.0.0 to 4.1.0 (#11590) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pytest-codspeed](https://github.com/CodSpeedHQ/pytest-codspeed) from 4.0.0 to 4.1.0.
Release notes

Sourced from pytest-codspeed's releases.

v4.1.0

What's New?

🚀 The new version of CodSpeedHQ/instrument-hooks will improve the quality of the walltime profiling!

Details

🐛 Bug Fixes

  • Use ParamSpec in instrument definition instead of tuple and dict by @art049
  • Use ParamSpec in the plugin call definition by @flying-sheep (see the sketch below)
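
For context, a generic sketch of the ParamSpec pattern these fixes adopt (Python 3.10+); the wrapper below is hypothetical and does not reflect pytest-codspeed's actual signatures:

    from collections.abc import Callable
    from typing import ParamSpec, TypeVar

    P = ParamSpec("P")
    T = TypeVar("T")

    def call_instrumented(fn: Callable[P, T], *args: P.args, **kwargs: P.kwargs) -> T:
        # ParamSpec threads fn's exact parameter types through the wrapper, so
        # type checkers can validate call sites; annotating the arguments as a
        # bare tuple and dict would erase that information.
        return fn(*args, **kwargs)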

Full Changelog: https://github.com/CodSpeedHQ/pytest-codspeed/compare/v4.0.0...v4.1.0

Changelog

Sourced from pytest-codspeed's changelog.

[4.1.0] - 2025-10-06

🐛 Bug Fixes

  • Use ParamSpec in instrument definition instead of tuple and dict by @​art049
  • Use ParamSpec in the plugin call definition by @​flying-sheep

Commits
  • 0a3c31b Release v4.1.0 🚀
  • 6cb7a88 chore: exclude beta tags in changelog
  • 63eec3b chore: bump instrument-hooks (#87)
  • 192d881 ci: switch to sharded upload (#75)
  • 5f6971d fix: use ParamSpec in instrument definition instead of tuple and dict
  • 44071fc fix: use ParamSpec in the plugin call definition
  • 475be0c docs: add pull_request trigger comment
  • 3e26619 chore: fix changelog generation
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pytest-codspeed&package-manager=pip&previous-version=4.0.0&new-version=4.1.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 52e9fb7b6cc..f2a48640109 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -183,7 +183,7 @@ pytest==8.4.2 # pytest-cov # pytest-mock # pytest-xdist -pytest-codspeed==4.0.0 +pytest-codspeed==4.1.1 # via # -r requirements/lint.in # -r requirements/test-common.in diff --git a/requirements/dev.txt b/requirements/dev.txt index be221bc5ee9..d83fab45281 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -178,7 +178,7 @@ pytest==8.4.2 # pytest-cov # pytest-mock # pytest-xdist -pytest-codspeed==4.0.0 +pytest-codspeed==4.1.1 # via # -r requirements/lint.in # -r requirements/test-common.in diff --git a/requirements/lint.txt b/requirements/lint.txt index cd6df0e8c7c..8852148c4a3 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -78,7 +78,7 @@ pytest==8.4.2 # -r requirements/lint.in # pytest-codspeed # pytest-mock -pytest-codspeed==4.0.0 +pytest-codspeed==4.1.1 # via -r requirements/lint.in pytest-mock==3.15.1 # via -r requirements/lint.in diff --git a/requirements/test-common.txt b/requirements/test-common.txt index eb859e6a0ca..feb90abb72d 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -71,7 +71,7 @@ pytest==8.4.2 # pytest-cov # pytest-mock # pytest-xdist -pytest-codspeed==4.0.0 +pytest-codspeed==4.1.1 # via -r requirements/test-common.in pytest-cov==7.0.0 # via -r requirements/test-common.in diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 79e091033b4..4017c3673cc 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -104,7 +104,7 @@ pytest==8.4.2 # pytest-cov # pytest-mock # pytest-xdist -pytest-codspeed==4.0.0 +pytest-codspeed==4.1.1 # via -r requirements/test-common.in pytest-cov==7.0.0 # via -r requirements/test-common.in diff --git a/requirements/test.txt b/requirements/test.txt index 2eb2e79fef3..4ef23e97e9e 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -104,7 +104,7 @@ pytest==8.4.2 # pytest-cov # pytest-mock # pytest-xdist -pytest-codspeed==4.0.0 +pytest-codspeed==4.1.1 # via -r requirements/test-common.in pytest-cov==7.0.0 # via -r requirements/test-common.in From 95dde8ed4515006063bdda04834d23049206707a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 8 Oct 2025 10:48:32 +0000 Subject: [PATCH 10/51] Bump github/codeql-action from 3 to 4 (#11608) Bumps [github/codeql-action](https://github.com/github/codeql-action) from 3 to 4.
Release notes

Sourced from github/codeql-action's releases.

v3.30.7

CodeQL Action Changelog

See the releases page for the relevant changes to the CodeQL CLI and language packs.

3.30.7 - 06 Oct 2025

No user facing changes.

See the full CHANGELOG.md for more information.

v3.30.6

CodeQL Action Changelog

See the releases page for the relevant changes to the CodeQL CLI and language packs.

3.30.6 - 02 Oct 2025

  • Update default CodeQL bundle version to 2.23.2. #3168

See the full CHANGELOG.md for more information.

v3.30.5

CodeQL Action Changelog

See the releases page for the relevant changes to the CodeQL CLI and language packs.

3.30.5 - 26 Sep 2025

  • We fixed a bug that was introduced in 3.30.4 with upload-sarif which resulted in files without a .sarif extension not getting uploaded. #3160

See the full CHANGELOG.md for more information.

v3.30.4

CodeQL Action Changelog

See the releases page for the relevant changes to the CodeQL CLI and language packs.

3.30.4 - 25 Sep 2025

  • We have improved the CodeQL Action's ability to validate that the workflow it is used in does not use different versions of the CodeQL Action for different workflow steps. Mixing different versions of the CodeQL Action in the same workflow is unsupported and can lead to unpredictable results. A warning will now be emitted from the codeql-action/init step if different versions of the CodeQL Action are detected in the workflow file. Additionally, an error will now be thrown by the other CodeQL Action steps if they load a configuration file that was generated by a different version of the codeql-action/init step. #3099 and #3100
  • We added support for reducing the size of dependency caches for Java analyses, which will reduce cache usage and speed up workflows. This will be enabled automatically at a later time. #3107
  • You can now run the latest CodeQL nightly bundle by passing tools: nightly to the init action. In general, the nightly bundle is unstable and we only recommend running it when directed by GitHub staff. #3130
  • Update default CodeQL bundle version to 2.23.1. #3118

See the full CHANGELOG.md for more information.

v3.30.3

CodeQL Action Changelog

... (truncated)

Changelog

Sourced from github/codeql-action's changelog.

3.29.4 - 23 Jul 2025

No user facing changes.

3.29.3 - 21 Jul 2025

No user facing changes.

3.29.2 - 30 Jun 2025

  • Experimental: When the quality-queries input for the init action is provided with an argument, separate .quality.sarif files are produced and uploaded for each language with the results of the specified queries. Do not use this in production as it is part of an internal experiment and subject to change at any time. #2935

3.29.1 - 27 Jun 2025

  • Fix bug in PR analysis where user-provided include query filter fails to exclude non-included queries. #2938
  • Update default CodeQL bundle version to 2.22.1. #2950

3.29.0 - 11 Jun 2025

  • Update default CodeQL bundle version to 2.22.0. #2925
  • Bump minimum CodeQL bundle version to 2.16.6. #2912

3.28.21 - 28 July 2025

No user facing changes.

3.28.20 - 21 July 2025

3.28.19 - 03 Jun 2025

  • The CodeQL Action no longer includes its own copy of the extractor for the actions language, which is currently in public preview. The actions extractor has been included in the CodeQL CLI since v2.20.6. If your workflow has enabled the actions language and you have pinned your tools: property to a specific version of the CodeQL CLI earlier than v2.20.6, you will need to update to at least CodeQL v2.20.6 or disable actions analysis.
  • Update default CodeQL bundle version to 2.21.4. #2910

3.28.18 - 16 May 2025

  • Update default CodeQL bundle version to 2.21.3. #2893
  • Skip validating SARIF produced by CodeQL for improved performance. #2894
  • The number of threads and amount of RAM used by CodeQL can now be set via the CODEQL_THREADS and CODEQL_RAM runner environment variables. If set, these environment variables override the threads and ram inputs respectively. #2891

3.28.17 - 02 May 2025

  • Update default CodeQL bundle version to 2.21.2. #2872

3.28.16 - 23 Apr 2025

... (truncated)

Commits
  • aac66ec Remove update-proxy-release workflow
  • 91a63dc Remove undefined values from results of unsafeEntriesInvariant
  • d25fa60 ESLint: Disable no-unused-vars for parameters starting with _
  • 3adb1ff Reorder supported tags in descending order
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=github/codeql-action&package-manager=github_actions&previous-version=3&new-version=4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Dependabot commands and options
You can trigger Dependabot actions by commenting on this PR: - `@dependabot rebase` will rebase this PR - `@dependabot recreate` will recreate this PR, overwriting any edits that have been made to it - `@dependabot merge` will merge this PR after your CI passes on it - `@dependabot squash and merge` will squash and merge this PR after your CI passes on it - `@dependabot cancel merge` will cancel a previously requested merge and block automerging - `@dependabot reopen` will reopen this PR if it is closed - `@dependabot close` will close this PR and stop Dependabot recreating it. You can achieve the same result by closing it manually - `@dependabot show ignore conditions` will show all of the ignore conditions of the specified dependency - `@dependabot ignore this major version` will close this PR and stop Dependabot creating any more for this major version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this minor version` will close this PR and stop Dependabot creating any more for this minor version (unless you reopen the PR or upgrade to it yourself) - `@dependabot ignore this dependency` will close this PR and stop Dependabot creating any more for this dependency (unless you reopen the PR or upgrade to it yourself)
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 .github/workflows/codeql.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index be954079132..ed447b9f29c 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -29,17 +29,17 @@ jobs:
         uses: actions/checkout@v5

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3
+        uses: github/codeql-action/init@v4
         with:
           languages: ${{ matrix.language }}
           config-file: ./.github/codeql.yml
           queries: +security-and-quality

       - name: Autobuild
-        uses: github/codeql-action/autobuild@v3
+        uses: github/codeql-action/autobuild@v4
         if: ${{ matrix.language == 'python' || matrix.language == 'javascript' }}

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3
+        uses: github/codeql-action/analyze@v4
         with:
           category: "/language:${{ matrix.language }}"

From d0c4d6157e8ebf3419e85655cfdcd73c7defb144 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Wed, 8 Oct 2025 10:54:30 +0000
Subject: [PATCH 11/51] Bump pydantic from 2.11.9 to 2.12.0 (#11609)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.11.9 to 2.12.0.
Release notes

Sourced from pydantic's releases.

v2.12.0 (2025-10-07)

[!NOTE] Check out the blog post for release highlights.

This is the final 2.12 release. It features the work of 20 external contributors and provides useful new features, along with initial Python 3.14 support. Several minor changes (considered non-breaking changes according to our versioning policy) are also included in this release. Make sure to look into them before upgrading.
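
As a quick post-upgrade smoke test for a bump like this, a minimal sketch (the model below is illustrative, not taken from aiohttp or pydantic's docs):

    from pydantic import BaseModel

    class User(BaseModel):
        id: int
        name: str = "anonymous"

    # Default (lax) validation should still coerce "1" to 1 after the upgrade.
    print(User(id="1"))  # id=1 name='anonymous'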

Changes (see the 2.12.0a1 and 2.12.0b1 releases for additional changes since 2.11):

Packaging

New Features

Fixes

New Contributors

Full Changelog: https://github.com/pydantic/pydantic/compare/v2.11.10...v2.12.0

v2.12.0b1 (2025-10-03)

This is the first beta release of the upcoming 2.12 release.

What's Changed

... (truncated)

Changelog

Sourced from pydantic's changelog.

v2.12.0 (2025-10-07)

GitHub release

What's Changed

This is the final 2.12 release. It features the work of 20 external contributors and provides useful new features, along with initial Python 3.14 support. Several minor changes (considered non-breaking changes according to our versioning policy) are also included in this release. Make sure to look into them before upgrading.

Changes (see the alpha and beta releases for additional changes since 2.11):

Packaging

New Features

Fixes

New Contributors

v2.12.0b1 (2025-10-03)

GitHub release

This is the first beta release of the upcoming 2.12 release.

What's Changed

Packaging

... (truncated)

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pydantic&package-manager=pip&previous-version=2.11.9&new-version=2.12.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 requirements/constraints.txt | 4 ++--
 requirements/dev.txt | 4 ++--
 requirements/lint.txt | 4 ++--
 requirements/test-common.txt | 4 ++--
 requirements/test-ft.txt | 4 ++--
 requirements/test.txt | 4 ++--
 6 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index f2a48640109..5a2fde809a7 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -156,9 +156,9 @@ pycares==4.11.0
     # via aiodns
 pycparser==2.23
     # via cffi
-pydantic==2.11.9
+pydantic==2.12.0
     # via python-on-whales
-pydantic-core==2.33.2
+pydantic-core==2.41.1
     # via pydantic
 pyenchant==3.3.0
     # via sphinxcontrib-spelling
diff --git a/requirements/dev.txt b/requirements/dev.txt
index d83fab45281..041892d794e 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -153,9 +153,9 @@ pycares==4.11.0
    # via aiodns
 pycparser==2.23
    # via cffi
-pydantic==2.11.9
+pydantic==2.12.0
    # via python-on-whales
-pydantic-core==2.33.2
+pydantic-core==2.41.1
    # via pydantic
 pygments==2.19.2
    # via
diff --git a/requirements/lint.txt b/requirements/lint.txt
index 8852148c4a3..d2b035ed53f 100644
--- a/requirements/lint.txt
+++ b/requirements/lint.txt
@@ -65,9 +65,9 @@ pycares==4.11.0
    # via aiodns
 pycparser==2.23
    # via cffi
-pydantic==2.11.9
+pydantic==2.12.0
    # via python-on-whales
-pydantic-core==2.33.2
+pydantic-core==2.41.1
    # via pydantic
 pygments==2.19.2
    # via
diff --git a/requirements/test-common.txt b/requirements/test-common.txt
index feb90abb72d..d9eb74ce2b6 100644
--- a/requirements/test-common.txt
+++ b/requirements/test-common.txt
@@ -56,9 +56,9 @@ proxy-py==2.4.10
    # via -r requirements/test-common.in
 pycparser==2.23
    # via cffi
-pydantic==2.12.0a1
+pydantic==2.12.0
    # via python-on-whales
-pydantic-core==2.37.2
+pydantic-core==2.41.1
    # via pydantic
 pygments==2.19.2
    # via
diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt
index 4017c3673cc..986371beae7 100644
--- a/requirements/test-ft.txt
+++ b/requirements/test-ft.txt
@@ -89,9 +89,9 @@ pycares==4.11.0
    # via aiodns
 pycparser==2.23
    # via cffi
-pydantic==2.12.0a1
+pydantic==2.12.0
    # via python-on-whales
-pydantic-core==2.37.2
+pydantic-core==2.41.1
    # via pydantic
 pygments==2.19.2
    # via
diff --git a/requirements/test.txt b/requirements/test.txt
index 4ef23e97e9e..d62a44f3ec7 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -89,9 +89,9 @@ pycares==4.11.0
    # via aiodns
 pycparser==2.23
    # via cffi
-pydantic==2.12.0a1
+pydantic==2.12.0
    # via python-on-whales
-pydantic-core==2.37.2
+pydantic-core==2.41.1
    # via pydantic
 pygments==2.19.2
    # via

From 3cf4ebaeed4b4c9c8592ec324186a8cd887914e4 Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Wed, 8 Oct 2025 19:36:13 +0100
Subject: [PATCH 12/51] [PR #11611/49290944 backport][3.14] Do not assume Python 3.14 has stdlib zstd (#11613)

**This is a backport of PR #11611 as merged into master (49290944c0d098a85dd867a9e331cfb33a802199).**

Co-authored-by: Jacob Henner
---
 CHANGES/11603.bugfix.rst | 1 +
 CONTRIBUTORS.txt | 1 +
 aiohttp/compression_utils.py | 18 +++++++++---------
 3 files changed, 11 insertions(+), 9 deletions(-)
 create mode 100644 CHANGES/11603.bugfix.rst

diff --git a/CHANGES/11603.bugfix.rst b/CHANGES/11603.bugfix.rst
new file mode 100644
index 00000000000..1475698f17a
--- /dev/null
+++ b/CHANGES/11603.bugfix.rst
@@ -0,0 +1 @@
+Fixed Python 3.14 support when built without ``zstd`` support -- by :user:`JacobHenner`.
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
index 38b888c6545..bbbdd650665 100644
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -170,6 +170,7 @@ Ivan Lakovic
 Ivan Larin
 J. Nick Koston
 Jacob Champion
+Jacob Henner
 Jaesung Lee
 Jake Davis
 Jakob Ackermann
diff --git a/aiohttp/compression_utils.py b/aiohttp/compression_utils.py
index 18739ed70fc..db060303e6f 100644
--- a/aiohttp/compression_utils.py
+++ b/aiohttp/compression_utils.py
@@ -21,18 +21,21 @@
 except ImportError:  # pragma: no cover
     HAS_BROTLI = False

-if sys.version_info >= (3, 14):
-    import compression.zstd  # noqa: I900
+try:
+    from compression.zstd import (  # type: ignore[import-not-found]  # noqa: I900
+        ZstdDecompressor,
+    )

     HAS_ZSTD = True
-else:
+except ImportError:
     try:
-        import zstandard
+        from zstandard import ZstdDecompressor

         HAS_ZSTD = True
     except ImportError:
         HAS_ZSTD = False

+
 MAX_SYNC_CHUNK_SIZE = 1024
@@ -297,13 +300,10 @@ def __init__(self) -> None:
                 "The zstd decompression is not available. "
                 "Please install `zstandard` module"
             )
-        if sys.version_info >= (3, 14):
-            self._obj = compression.zstd.ZstdDecompressor()
-        else:
-            self._obj = zstandard.ZstdDecompressor()
+        self._obj = ZstdDecompressor()

     def decompress_sync(self, data: bytes) -> bytes:
-        return self._obj.decompress(data)
+        return self._obj.decompress(data)  # type: ignore[no-any-return]

     def flush(self) -> bytes:
         return b""

From 1798ef911d7f88d8b6dfda47fcf051181b19f68a Mon Sep 17 00:00:00 2001
From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com>
Date: Thu, 9 Oct 2025 12:00:27 +0100
Subject: [PATCH 13/51] [PR #11611/49290944 backport][3.13] Do not assume Python 3.14 has stdlib zstd (#11612)

**This is a backport of PR #11611 as merged into master (49290944c0d098a85dd867a9e331cfb33a802199).**

---------

Co-authored-by: Jacob Henner
---
 CHANGES.rst | 2 +-
 CHANGES/11603.bugfix.rst | 1 +
 CONTRIBUTORS.txt | 1 +
 aiohttp/compression_utils.py | 18 +++++++++---------
 4 files changed, 12 insertions(+), 10 deletions(-)
 create mode 100644 CHANGES/11603.bugfix.rst

diff --git a/CHANGES.rst b/CHANGES.rst
index 5fdccd5ca45..0bab04c4072 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -4546,7 +4546,7 @@ Bugfixes
   `#5853 `_
 - Added ``params`` keyword argument to ``ClientSession.ws_connect``. -- :user:`hoh`.
   `#5868 `_
-- Uses :py:class:`~asyncio.ThreadedChildWatcher` under POSIX to allow setting up test loop in non-main thread.
+- Uses ``asyncio.ThreadedChildWatcher`` under POSIX to allow setting up test loop in non-main thread.
   `#5877 `_
 - Fix the error in handling the return value of `getaddrinfo`.
   `getaddrinfo` will return an `(int, bytes)` tuple, if CPython could not handle the address family.
diff --git a/CHANGES/11603.bugfix.rst b/CHANGES/11603.bugfix.rst
new file mode 100644
index 00000000000..1475698f17a
--- /dev/null
+++ b/CHANGES/11603.bugfix.rst
@@ -0,0 +1 @@
+Fixed Python 3.14 support when built without ``zstd`` support -- by :user:`JacobHenner`.
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
index 38b888c6545..bbbdd650665 100644
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -170,6 +170,7 @@ Ivan Lakovic
 Ivan Larin
 J. Nick Koston
 Jacob Champion
+Jacob Henner
 Jaesung Lee
 Jake Davis
 Jakob Ackermann
diff --git a/aiohttp/compression_utils.py b/aiohttp/compression_utils.py
index cdede4244b4..2a462f47153 100644
--- a/aiohttp/compression_utils.py
+++ b/aiohttp/compression_utils.py
@@ -21,18 +21,21 @@
 except ImportError:  # pragma: no cover
     HAS_BROTLI = False

-if sys.version_info >= (3, 14):
-    import compression.zstd  # noqa: I900
+try:
+    from compression.zstd import (  # type: ignore[import-not-found]  # noqa: I900
+        ZstdDecompressor,
+    )

     HAS_ZSTD = True
-else:
+except ImportError:
     try:
-        import zstandard
+        from zstandard import ZstdDecompressor

         HAS_ZSTD = True
     except ImportError:
         HAS_ZSTD = False

+
 MAX_SYNC_CHUNK_SIZE = 1024
@@ -297,13 +300,10 @@ def __init__(self) -> None:
                 "The zstd decompression is not available. "
                 "Please install `zstandard` module"
             )
-        if sys.version_info >= (3, 14):
-            self._obj = compression.zstd.ZstdDecompressor()
-        else:
-            self._obj = zstandard.ZstdDecompressor()
+        self._obj = ZstdDecompressor()

     def decompress_sync(self, data: bytes) -> bytes:
-        return self._obj.decompress(data)
+        return self._obj.decompress(data)  # type: ignore[no-any-return]

     def flush(self) -> bytes:
         return b""

From 6bc08a77aace2a9eab4018194f62e34af17d8ab7 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 9 Oct 2025 11:01:03 +0000
Subject: [PATCH 14/51] Bump tomli from 2.2.1 to 2.3.0 (#11619)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Bumps [tomli](https://github.com/hukkin/tomli) from 2.2.1 to 2.3.0.
Changelog

Sourced from tomli's changelog.

2.3.0

  • Added
    • Binary wheels for Python 3.14 (also free-threaded)
  • Performance
    • Reduced import time
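
Since the headline performance change is import time, a rough way to observe it locally (an unscientific sketch, assuming tomli is installed from PyPI):

    import time

    t0 = time.perf_counter()
    import tomli  # 2.3.0 advertises a reduced import time
    t1 = time.perf_counter()

    print(f"import tomli took {(t1 - t0) * 1000:.2f} ms")
    print(tomli.loads('answer = 42'))  # {'answer': 42}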
Commits
  • 3fccd16 Bump version: 2.2.1 → 2.3.0
  • 6504016 Add 2.3.0 changelog
  • 0bc66fc Remove now off-by-default PyPy from cibuildwheel skip list
  • 0aa242f Update license metadata to appease PEP 639
  • a18221e Bump GitHub CI actions
  • 6fa4d90 [pre-commit.ci] pre-commit autoupdate (#260)
  • b974fa1 [pre-commit.ci] pre-commit autoupdate (#248)
  • f574f36 Update mypy to 1.15 and use --strict mode (#257)
  • 1da01ef Reduce import time by removing typing import (#251)
  • 4188188 Reduce import time by removing string and tomli._types imports
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=tomli&package-manager=pip&previous-version=2.2.1&new-version=2.3.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 requirements/constraints.txt | 2 +-
 requirements/dev.txt | 2 +-
 requirements/doc-spelling.txt | 2 +-
 requirements/doc.txt | 2 +-
 requirements/lint.txt | 2 +-
 requirements/test-common.txt | 2 +-
 requirements/test-ft.txt | 2 +-
 requirements/test.txt | 2 +-
 8 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index 5a2fde809a7..16336f2cc4d 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -247,7 +247,7 @@ stamina==25.1.0
    # via cherry-picker
 tenacity==9.1.2
    # via stamina
-tomli==2.2.1
+tomli==2.3.0
    # via
    #   build
    #   cherry-picker
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 041892d794e..973c0560a30 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -238,7 +238,7 @@ stamina==25.1.0
    # via cherry-picker
 tenacity==9.1.2
    # via stamina
-tomli==2.2.1
+tomli==2.3.0
    # via
    #   build
    #   cherry-picker
diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt
index 5515f32cfd4..95bc1d18964 100644
--- a/requirements/doc-spelling.txt
+++ b/requirements/doc-spelling.txt
@@ -61,7 +61,7 @@ sphinxcontrib-spelling==8.0.1 ; platform_system != "Windows"
    # via -r requirements/doc-spelling.in
 sphinxcontrib-towncrier==0.5.0a0
    # via -r requirements/doc.in
-tomli==2.2.1
+tomli==2.3.0
    # via
    #   sphinx
    #   towncrier
diff --git a/requirements/doc.txt b/requirements/doc.txt
index 1bb127cf02c..9369ca3daf3 100644
--- a/requirements/doc.txt
+++ b/requirements/doc.txt
@@ -54,7 +54,7 @@ sphinxcontrib-serializinghtml==2.0.0
    # via sphinx
 sphinxcontrib-towncrier==0.5.0a0
    # via -r requirements/doc.in
-tomli==2.2.1
+tomli==2.3.0
    # via
    #   sphinx
    #   towncrier
diff --git a/requirements/lint.txt b/requirements/lint.txt
index d2b035ed53f..b60cf04511a 100644
--- a/requirements/lint.txt
+++ b/requirements/lint.txt
@@ -94,7 +94,7 @@ six==1.17.0
    # via python-dateutil
 slotscheck==0.19.1
    # via -r requirements/lint.in
-tomli==2.2.1
+tomli==2.3.0
    # via
    #   mypy
    #   pytest
diff --git a/requirements/test-common.txt b/requirements/test-common.txt
index d9eb74ce2b6..9e9cd9bc1ac 100644
--- a/requirements/test-common.txt
+++ b/requirements/test-common.txt
@@ -93,7 +93,7 @@ setuptools-git==1.2
    # via -r requirements/test-common.in
 six==1.17.0
    # via python-dateutil
-tomli==2.2.1
+tomli==2.3.0
    # via
    #   coverage
    #   mypy
diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt
index 986371beae7..4cdb5767c19 100644
--- a/requirements/test-ft.txt
+++ b/requirements/test-ft.txt
@@ -126,7 +126,7 @@ setuptools-git==1.2
    # via -r requirements/test-common.in
 six==1.17.0
    # via python-dateutil
-tomli==2.2.1
+tomli==2.3.0
    # via
    #   coverage
    #   mypy
diff --git a/requirements/test.txt b/requirements/test.txt
index d62a44f3ec7..81185867120 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -126,7 +126,7 @@ setuptools-git==1.2
    # via -r requirements/test-common.in
 six==1.17.0
    # via python-dateutil
-tomli==2.2.1
+tomli==2.3.0
    # via
    #   coverage
    #   mypy

From 853918352d88cf0d14647e72aa96c916031be362 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 9 Oct 2025 11:08:01 +0000
Subject: [PATCH 15/51] Bump propcache from 0.4.0 to 0.4.1 (#11620)

Bumps [propcache](https://github.com/aio-libs/propcache) from 0.4.0 to 0.4.1.
Release notes

Sourced from propcache's releases.

0.4.1

Bug fixes

  • Fixed reference leak caused by Py_INCREF because Cython has its own reference counter systems -- by :user:Vizonex.

    Related issues and pull requests on GitHub: #162.

Contributor-facing changes

  • Fixes the default value for the os parameter in reusable-build-wheel.yml to be ubuntu-latest instead of ubuntu.

    Related issues and pull requests on GitHub: #155.
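
For context on what is being bumped: propcache provides instance-level caching decorators through its documented propcache.api module, and 0.4.1 only changes internal reference counting. A minimal sketch of typical use (the class below is illustrative):

    from propcache.api import cached_property

    class Endpoint:
        def __init__(self, raw: str) -> None:
            self._raw = raw

        @cached_property
        def parts(self) -> "list[str]":
            # Computed on first access, then cached on the instance.
            return self._raw.split("/")

    ep = Endpoint("a/b/c")
    assert ep.parts is ep.parts  # repeat access returns the cached object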


Changelog

Sourced from propcache's changelog.

0.4.1

(2025-10-08)

Bug fixes

  • Fixed reference leak caused by Py_INCREF because Cython has its own reference counter systems -- by :user:Vizonex.

    Related issues and pull requests on GitHub: :issue:162.

Contributor-facing changes

  • Fixes the default value for the os parameter in reusable-build-wheel.yml to be ubuntu-latest instead of ubuntu.

    Related issues and pull requests on GitHub: :issue:155.


Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=propcache&package-manager=pip&previous-version=0.4.0&new-version=0.4.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 requirements/base-ft.txt | 2 +-
 requirements/base.txt | 2 +-
 requirements/constraints.txt | 2 +-
 requirements/dev.txt | 2 +-
 requirements/runtime-deps.txt | 2 +-
 requirements/test-ft.txt | 2 +-
 requirements/test.txt | 2 +-
 7 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/requirements/base-ft.txt b/requirements/base-ft.txt
index 8f629b8a566..5ed765c243a 100644
--- a/requirements/base-ft.txt
+++ b/requirements/base-ft.txt
@@ -32,7 +32,7 @@ multidict==6.7.0
    #   yarl
 packaging==25.0
    # via gunicorn
-propcache==0.4.0
+propcache==0.4.1
    # via
    #   -r requirements/runtime-deps.in
    #   yarl
diff --git a/requirements/base.txt b/requirements/base.txt
index 4e37bd8498f..b08b907e868 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -32,7 +32,7 @@ multidict==6.7.0
    #   yarl
 packaging==25.0
    # via gunicorn
-propcache==0.4.0
+propcache==0.4.1
    # via
    #   -r requirements/runtime-deps.in
    #   yarl
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index 16336f2cc4d..042f4ae04dd 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -146,7 +146,7 @@ pluggy==1.6.0
    #   pytest-cov
 pre-commit==4.3.0
    # via -r requirements/lint.in
-propcache==0.4.0
+propcache==0.4.1
    # via
    #   -r requirements/runtime-deps.in
    #   yarl
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 973c0560a30..6805c639e61 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -143,7 +143,7 @@ pluggy==1.6.0
    #   pytest-cov
 pre-commit==4.3.0
    # via -r requirements/lint.in
-propcache==0.4.0
+propcache==0.4.1
    # via
    #   -r requirements/runtime-deps.in
    #   yarl
diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt
index c4220c48940..97011893f06 100644
--- a/requirements/runtime-deps.txt
+++ b/requirements/runtime-deps.txt
@@ -28,7 +28,7 @@ multidict==6.7.0
    # via
    #   -r requirements/runtime-deps.in
    #   yarl
-propcache==0.4.0
+propcache==0.4.1
    # via
    #   -r requirements/runtime-deps.in
    #   yarl
diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt
index 4cdb5767c19..41d06eb07c9 100644
--- a/requirements/test-ft.txt
+++ b/requirements/test-ft.txt
@@ -79,7 +79,7 @@ pluggy==1.6.0
    # via
    #   pytest
    #   pytest-cov
-propcache==0.4.0
+propcache==0.4.1
    # via
    #   -r requirements/runtime-deps.in
    #   yarl
diff --git a/requirements/test.txt b/requirements/test.txt
index 81185867120..d3f8539c01e 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -79,7 +79,7 @@ pluggy==1.6.0
    # via
    #   pytest
    #   pytest-cov
-propcache==0.4.0
+propcache==0.4.1
    # via
    #   -r requirements/runtime-deps.in
    #   yarl

From 18f2546e3d924e4e46c766b8fcf000f56cf2c96a Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 9 Oct 2025 11:15:43 +0000
Subject: [PATCH 16/51] Bump filelock from 3.19.1 to 3.20.0 (#11621)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Bumps [filelock](https://github.com/tox-dev/py-filelock) from 3.19.1 to 3.20.0.
Release notes

Sourced from filelock's releases.

3.20.0

What's Changed

New Contributors

Full Changelog: https://github.com/tox-dev/filelock/compare/3.19.1...3.20.0

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=filelock&package-manager=pip&previous-version=3.19.1&new-version=3.20.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 requirements/constraints.txt | 2 +-
 requirements/dev.txt | 2 +-
 requirements/lint.txt | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index 042f4ae04dd..307f2653d36 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -72,7 +72,7 @@ exceptiongroup==1.3.0
    # via pytest
 execnet==2.1.1
    # via pytest-xdist
-filelock==3.19.1
+filelock==3.20.0
    # via virtualenv
 forbiddenfruit==0.1.4
    # via blockbuster
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 6805c639e61..4d6321f36aa 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -70,7 +70,7 @@ exceptiongroup==1.3.0
    # via pytest
 execnet==2.1.1
    # via pytest-xdist
-filelock==3.19.1
+filelock==3.20.0
    # via virtualenv
 forbiddenfruit==0.1.4
    # via blockbuster
diff --git a/requirements/lint.txt b/requirements/lint.txt
index b60cf04511a..ed98cea382a 100644
--- a/requirements/lint.txt
+++ b/requirements/lint.txt
@@ -27,7 +27,7 @@ distlib==0.4.0
    # via virtualenv
 exceptiongroup==1.3.0
    # via pytest
-filelock==3.19.1
+filelock==3.20.0
    # via virtualenv
 forbiddenfruit==0.1.4
    # via blockbuster

From 8bc503ef2aa5200736af11e707cf04d2d1a88cd0 Mon Sep 17 00:00:00 2001
From: Mingjie Zhao <48666391+ZhaoMJ@users.noreply.github.com>
Date: Thu, 9 Oct 2025 21:20:42 +0800
Subject: [PATCH 17/51] [PR #11623/df8ad83 backport][3.14] Fix zstd decompression for chunked zstd response (#11624)

---
 CHANGES/11623.bugfix | 1 +
 CONTRIBUTORS.txt | 1 +
 aiohttp/compression_utils.py | 18 +++++++-----------
 aiohttp/http_parser.py | 2 +-
 docs/client_quickstart.rst | 2 +-
 requirements/base-ft.txt | 2 +-
 requirements/base.txt | 2 +-
 requirements/constraints.txt | 2 +-
 requirements/dev.txt | 2 +-
 requirements/lint.in | 2 +-
 requirements/lint.txt | 2 +-
 requirements/runtime-deps.in | 2 +-
 requirements/runtime-deps.txt | 2 +-
 requirements/test-ft.txt | 2 +-
 requirements/test.txt | 2 +-
 setup.cfg | 2 +-
 tests/test_http_parser.py | 14 +++++++-------
 17 files changed, 29 insertions(+), 31 deletions(-)
 create mode 100644 CHANGES/11623.bugfix

diff --git a/CHANGES/11623.bugfix b/CHANGES/11623.bugfix
new file mode 100644
index 00000000000..447dd56388c
--- /dev/null
+++ b/CHANGES/11623.bugfix
@@ -0,0 +1 @@
+Switched to `backports.zstd` for Python <3.14 and fixed zstd decompression for chunked zstd streams -- by :user:`ZhaoMJ`.
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
index bbbdd650665..15a681ee6af 100644
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -263,6 +263,7 @@ Mikhail Burshteyn
 Mikhail Kashkin
 Mikhail Lukyanchenko
 Mikhail Nacharov
+Mingjie Zhao
 Misha Behersky
 Mitchell Ferree
 Morgan Delahaye-Prat
diff --git a/aiohttp/compression_utils.py b/aiohttp/compression_utils.py
index db060303e6f..dab571f5548 100644
--- a/aiohttp/compression_utils.py
+++ b/aiohttp/compression_utils.py
@@ -22,18 +22,14 @@
     HAS_BROTLI = False

 try:
-    from compression.zstd import (  # type: ignore[import-not-found]  # noqa: I900
-        ZstdDecompressor,
-    )
+    if sys.version_info >= (3, 14):
+        from compression.zstd import ZstdDecompressor  # noqa: I900
+    else:  # TODO(PY314): Remove mentions of backports.zstd across codebase
+        from backports.zstd import ZstdDecompressor

     HAS_ZSTD = True
 except ImportError:
-    try:
-        from zstandard import ZstdDecompressor
-
-        HAS_ZSTD = True
-    except ImportError:
-        HAS_ZSTD = False
+    HAS_ZSTD = False

 MAX_SYNC_CHUNK_SIZE = 1024
@@ -298,12 +294,12 @@ def __init__(self) -> None:
         if not HAS_ZSTD:
             raise RuntimeError(
                 "The zstd decompression is not available. "
-                "Please install `zstandard` module"
+                "Please install `backports.zstd` module"
             )
         self._obj = ZstdDecompressor()

     def decompress_sync(self, data: bytes) -> bytes:
-        return self._obj.decompress(data)  # type: ignore[no-any-return]
+        return self._obj.decompress(data)

     def flush(self) -> bytes:
         return b""
diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py
index d1126b6ea9e..98097dc76ce 100644
--- a/aiohttp/http_parser.py
+++ b/aiohttp/http_parser.py
@@ -966,7 +966,7 @@ def __init__(self, out: StreamReader, encoding: str | None) -> None:
         if not HAS_ZSTD:
             raise ContentEncodingError(
                 "Can not decode content-encoding: zstandard (zstd). "
-                "Please install `zstandard`"
+                "Please install `backports.zstd`"
             )
             self.decompressor = ZSTDDecompressor()
         else:
diff --git a/docs/client_quickstart.rst b/docs/client_quickstart.rst
index 77ebaab0369..b74c2500065 100644
--- a/docs/client_quickstart.rst
+++ b/docs/client_quickstart.rst
@@ -188,7 +188,7 @@ just install `Brotli `_ or `brotlicffi `_.

 You can enable ``zstd`` transfer-encodings support,
-install `zstandard `_.
+install `backports.zstd `_.
 If you are using Python >= 3.14, no dependency should be required.

 JSON Request
diff --git a/requirements/base-ft.txt b/requirements/base-ft.txt
index 5ed765c243a..8173c3a3599 100644
--- a/requirements/base-ft.txt
+++ b/requirements/base-ft.txt
@@ -46,5 +46,5 @@ typing-extensions==4.15.0
    #   multidict
 yarl==1.22.0
    # via -r requirements/runtime-deps.in
-zstandard==0.25.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
+backports.zstd==0.5.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
    # via -r requirements/runtime-deps.in
diff --git a/requirements/base.txt b/requirements/base.txt
index b08b907e868..b3d43773885 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -48,5 +48,5 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho
    # via -r requirements/base.in
 yarl==1.22.0
    # via -r requirements/runtime-deps.in
-zstandard==0.25.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
+backports.zstd==0.5.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
    # via -r requirements/runtime-deps.in
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index 307f2653d36..433339af833 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -302,7 +302,7 @@ zlib-ng==1.0.0
    # via
    #   -r requirements/lint.in
    #   -r requirements/test-common.in
-zstandard==0.25.0 ; implementation_name == "cpython"
+backports.zstd==0.5.0 ; implementation_name == "cpython"
    # via
    #   -r requirements/lint.in
    #   -r requirements/runtime-deps.in
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 4d6321f36aa..f864f0c8ad2 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -293,7 +293,7 @@ zlib-ng==1.0.0
    # via
    #   -r requirements/lint.in
    #   -r requirements/test-common.in
-zstandard==0.25.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
+backports.zstd==0.5.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
    # via
    #   -r requirements/lint.in
    #   -r requirements/runtime-deps.in
diff --git a/requirements/lint.in b/requirements/lint.in
index 9663712907d..5bfd3c31c65 100644
--- a/requirements/lint.in
+++ b/requirements/lint.in
@@ -1,4 +1,5 @@
 aiodns
+backports.zstd; implementation_name == "cpython"
 blockbuster
 freezegun
 isal
@@ -13,4 +14,3 @@ trustme
 uvloop; platform_system != "Windows"
 valkey
 zlib_ng
-zstandard; implementation_name == "cpython"
diff --git a/requirements/lint.txt b/requirements/lint.txt
index ed98cea382a..e2ea2511ca4 100644
--- a/requirements/lint.txt
+++ b/requirements/lint.txt
@@ -121,5 +121,5 @@ virtualenv==20.34.0
    # via pre-commit
 zlib-ng==1.0.0
    # via -r requirements/lint.in
-zstandard==0.25.0 ; implementation_name == "cpython"
+backports.zstd==0.5.0 ; implementation_name == "cpython"
    # via -r requirements/lint.in
diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in
index 7400baa8370..ad8f28e750d 100644
--- a/requirements/runtime-deps.in
+++ b/requirements/runtime-deps.in
@@ -5,10 +5,10 @@ aiohappyeyeballs >= 2.5.0
 aiosignal >= 1.4.0
 async-timeout >= 4.0, < 6.0 ; python_version < "3.11"
 attrs >= 17.3.0
+backports.zstd; platform_python_implementation == 'CPython' and python_version < "3.14"
 Brotli; platform_python_implementation == 'CPython'
 brotlicffi; platform_python_implementation != 'CPython'
 frozenlist >= 1.1.1
 multidict >=4.5, < 7.0
 propcache >= 0.2.0
 yarl >= 1.17.0, < 2.0
-zstandard; platform_python_implementation == 'CPython' and python_version < "3.14"
diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt
index 97011893f06..aea8b8bccb9 100644
--- a/requirements/runtime-deps.txt
+++ b/requirements/runtime-deps.txt
@@ -42,5 +42,5 @@ typing-extensions==4.15.0
    #   multidict
 yarl==1.22.0
    # via -r requirements/runtime-deps.in
-zstandard==0.25.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
+backports.zstd==0.5.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
    # via -r requirements/runtime-deps.in
diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt
index 41d06eb07c9..1c632ee5aaf 100644
--- a/requirements/test-ft.txt
+++ b/requirements/test-ft.txt
@@ -152,5 +152,5 @@ yarl==1.22.0
    # via -r requirements/runtime-deps.in
 zlib-ng==1.0.0
    # via -r requirements/test-common.in
-zstandard==0.25.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
+backports.zstd==0.5.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
    # via -r requirements/runtime-deps.in
diff --git a/requirements/test.txt b/requirements/test.txt
index d3f8539c01e..d91804223a8 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -154,5 +154,5 @@ yarl==1.22.0
    # via -r requirements/runtime-deps.in
 zlib-ng==1.0.0
    # via -r requirements/test-common.in
-zstandard==0.25.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
+backports.zstd==0.5.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
    # via -r requirements/runtime-deps.in
diff --git a/setup.cfg b/setup.cfg
index 583bc8f36f2..e54010f82a1 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -70,7 +70,7 @@ speedups =
     aiodns >= 3.3.0
     Brotli; platform_python_implementation == 'CPython'
     brotlicffi; platform_python_implementation != 'CPython'
-    zstandard; platform_python_implementation == 'CPython' and python_version < "3.14"
+    backports.zstd; platform_python_implementation == 'CPython' and python_version < "3.14"

 [options.packages.find]
 exclude =
diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py
index 680b8404d4f..35390232d1e 100644
--- a/tests/test_http_parser.py
+++ b/tests/test_http_parser.py
@@ -34,13 +34,13 @@
 except ImportError:
     brotli = None

-if sys.version_info >= (3, 14):
-    import compression.zstd as zstandard  # noqa: I900
-else:
-    try:
-        import zstandard
-    except ImportError:
-        zstandard = None  # type: ignore[assignment]
+try:
+    if sys.version_info >= (3, 14):
+        import compression.zstd as zstandard  # noqa: I900
+    else:
+        import backports.zstd as zstandard
+except ImportError:
+    zstandard = None  # type: ignore[assignment]

 REQUEST_PARSERS = [HttpRequestParserPy]
 RESPONSE_PARSERS = [HttpResponseParserPy]

From 53620e08582a897679395556ce3d8a593ed3150b Mon Sep 17 00:00:00 2001
From: Mingjie Zhao <48666391+ZhaoMJ@users.noreply.github.com>
Date: Thu, 9 Oct 2025 21:27:20 +0800
Subject: [PATCH 18/51] [PR #11623/df8ad83 backport][3.13] Fix zstd decompression for chunked zstd response (#11625)

---
 CHANGES/11623.bugfix | 1 +
 CONTRIBUTORS.txt | 1 +
 aiohttp/compression_utils.py | 18 +++++++-----------
 aiohttp/http_parser.py | 2 +-
 docs/client_quickstart.rst | 2 +-
 requirements/base-ft.txt | 2 +-
 requirements/base.txt | 2 +-
 requirements/constraints.txt | 2 +-
 requirements/dev.txt | 2 +-
 requirements/lint.in | 2 +-
 requirements/lint.txt | 2 +-
 requirements/runtime-deps.in | 2 +-
 requirements/runtime-deps.txt | 2 +-
 requirements/test-ft.txt | 2 +-
 requirements/test.txt | 2 +-
 setup.cfg | 2 +-
 tests/test_http_parser.py | 14 +++++++-------
 17 files changed, 29 insertions(+), 31 deletions(-)
 create mode 100644 CHANGES/11623.bugfix

diff --git a/CHANGES/11623.bugfix b/CHANGES/11623.bugfix
new file mode 100644
index 00000000000..447dd56388c
--- /dev/null
+++ b/CHANGES/11623.bugfix
@@ -0,0 +1 @@
+Switched to `backports.zstd` for Python <3.14 and fixed zstd decompression for chunked zstd streams -- by :user:`ZhaoMJ`.
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
index bbbdd650665..15a681ee6af 100644
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -263,6 +263,7 @@ Mikhail Burshteyn
 Mikhail Kashkin
 Mikhail Lukyanchenko
 Mikhail Nacharov
+Mingjie Zhao
 Misha Behersky
 Mitchell Ferree
 Morgan Delahaye-Prat
diff --git a/aiohttp/compression_utils.py b/aiohttp/compression_utils.py
index 2a462f47153..c97abd0f822 100644
--- a/aiohttp/compression_utils.py
+++ b/aiohttp/compression_utils.py
@@ -22,18 +22,14 @@
     HAS_BROTLI = False

 try:
-    from compression.zstd import (  # type: ignore[import-not-found]  # noqa: I900
-        ZstdDecompressor,
-    )
+    if sys.version_info >= (3, 14):
+        from compression.zstd import ZstdDecompressor  # noqa: I900
+    else:  # TODO(PY314): Remove mentions of backports.zstd across codebase
+        from backports.zstd import ZstdDecompressor

     HAS_ZSTD = True
 except ImportError:
-    try:
-        from zstandard import ZstdDecompressor
-
-        HAS_ZSTD = True
-    except ImportError:
-        HAS_ZSTD = False
+    HAS_ZSTD = False

 MAX_SYNC_CHUNK_SIZE = 1024
@@ -298,12 +294,12 @@ def __init__(self) -> None:
         if not HAS_ZSTD:
             raise RuntimeError(
                 "The zstd decompression is not available. "
-                "Please install `zstandard` module"
+                "Please install `backports.zstd` module"
             )
         self._obj = ZstdDecompressor()

     def decompress_sync(self, data: bytes) -> bytes:
-        return self._obj.decompress(data)  # type: ignore[no-any-return]
+        return self._obj.decompress(data)

     def flush(self) -> bytes:
         return b""
diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py
index f862088850f..9a2c00e6542 100644
--- a/aiohttp/http_parser.py
+++ b/aiohttp/http_parser.py
@@ -980,7 +980,7 @@ def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
         if not HAS_ZSTD:
             raise ContentEncodingError(
                 "Can not decode content-encoding: zstandard (zstd). "
-                "Please install `zstandard`"
+                "Please install `backports.zstd`"
             )
             self.decompressor = ZSTDDecompressor()
         else:
diff --git a/docs/client_quickstart.rst b/docs/client_quickstart.rst
index 77ebaab0369..b74c2500065 100644
--- a/docs/client_quickstart.rst
+++ b/docs/client_quickstart.rst
@@ -188,7 +188,7 @@ just install `Brotli `_ or `brotlicffi `_.

 You can enable ``zstd`` transfer-encodings support,
-install `zstandard `_.
+install `backports.zstd `_.
 If you are using Python >= 3.14, no dependency should be required.

 JSON Request
diff --git a/requirements/base-ft.txt b/requirements/base-ft.txt
index 08de9013dc4..8a8d2a15499 100644
--- a/requirements/base-ft.txt
+++ b/requirements/base-ft.txt
@@ -46,5 +46,5 @@ typing-extensions==4.15.0
    #   multidict
 yarl==1.21.0
    # via -r requirements/runtime-deps.in
-zstandard==0.25.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
+backports.zstd==0.5.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
    # via -r requirements/runtime-deps.in
diff --git a/requirements/base.txt b/requirements/base.txt
index 3b9af1959e8..fa734658aba 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -48,5 +48,5 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho
    # via -r requirements/base.in
 yarl==1.21.0
    # via -r requirements/runtime-deps.in
-zstandard==0.25.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
+backports.zstd==0.5.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
    # via -r requirements/runtime-deps.in
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index d74296e4dc5..ec3806c6977 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -302,7 +302,7 @@ zlib-ng==1.0.0
    # via
    #   -r requirements/lint.in
    #   -r requirements/test-common.in
-zstandard==0.25.0 ; implementation_name == "cpython"
+backports.zstd==0.5.0 ; implementation_name == "cpython"
    # via
    #   -r requirements/lint.in
    #   -r requirements/runtime-deps.in
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 4f67db4f3e1..63cc8589262 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -293,7 +293,7 @@ zlib-ng==1.0.0
    # via
    #   -r requirements/lint.in
    #   -r requirements/test-common.in
-zstandard==0.25.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
+backports.zstd==0.5.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
    # via
    #   -r requirements/lint.in
    #   -r requirements/runtime-deps.in
diff --git a/requirements/lint.in b/requirements/lint.in
index 9663712907d..5bfd3c31c65 100644
--- a/requirements/lint.in
+++ b/requirements/lint.in
@@ -1,4 +1,5 @@
 aiodns
+backports.zstd; implementation_name == "cpython"
 blockbuster
 freezegun
 isal
@@ -13,4 +14,3 @@ trustme
 uvloop; platform_system != "Windows"
 valkey
 zlib_ng
-zstandard; implementation_name == "cpython"
diff --git a/requirements/lint.txt b/requirements/lint.txt
index 8de4c8642d5..b3b12bdd62a 100644
--- a/requirements/lint.txt
+++ b/requirements/lint.txt
@@ -121,5 +121,5 @@ virtualenv==20.34.0
    # via pre-commit
 zlib-ng==1.0.0
    # via -r requirements/lint.in
-zstandard==0.25.0 ; implementation_name == "cpython"
+backports.zstd==0.5.0 ; implementation_name == "cpython"
    # via -r requirements/lint.in
diff --git a/requirements/runtime-deps.in b/requirements/runtime-deps.in
index 7400baa8370..ad8f28e750d 100644
--- a/requirements/runtime-deps.in
+++ b/requirements/runtime-deps.in
@@ -5,10 +5,10 @@ aiohappyeyeballs >= 2.5.0
 aiosignal >= 1.4.0
 async-timeout >= 4.0, < 6.0 ; python_version < "3.11"
 attrs >= 17.3.0
+backports.zstd; platform_python_implementation == 'CPython' and python_version < "3.14"
 Brotli; platform_python_implementation == 'CPython'
 brotlicffi; platform_python_implementation != 'CPython'
 frozenlist >= 1.1.1
 multidict >=4.5, < 7.0
 propcache >= 0.2.0
 yarl >= 1.17.0, < 2.0
-zstandard; platform_python_implementation == 'CPython' and python_version < "3.14"
diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt
index 727745a5617..232ae352db1 100644
--- a/requirements/runtime-deps.txt
+++ b/requirements/runtime-deps.txt
@@ -42,5 +42,5 @@ typing-extensions==4.15.0
    #   multidict
 yarl==1.21.0
    # via -r requirements/runtime-deps.in
-zstandard==0.25.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
+backports.zstd==0.5.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
    # via -r requirements/runtime-deps.in
diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt
index 3e2a7fa1d59..04b1dcb86e4 100644
--- a/requirements/test-ft.txt
+++ b/requirements/test-ft.txt
@@ -152,5 +152,5 @@ yarl==1.21.0
    # via -r requirements/runtime-deps.in
 zlib-ng==1.0.0
    # via -r requirements/test-common.in
-zstandard==0.25.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
+backports.zstd==0.5.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
    # via -r requirements/runtime-deps.in
diff --git a/requirements/test.txt b/requirements/test.txt
index 3d356ab6c7f..b7c53b0b6d5 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -154,5 +154,5 @@ yarl==1.21.0
    # via -r requirements/runtime-deps.in
 zlib-ng==1.0.0
    # via -r requirements/test-common.in
-zstandard==0.25.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
+backports.zstd==0.5.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
    # via -r requirements/runtime-deps.in
diff --git a/setup.cfg b/setup.cfg
index 5117eccc043..490e6030e8b 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -71,7 +71,7 @@ speedups =
     aiodns >= 3.3.0
     Brotli; platform_python_implementation == 'CPython'
     brotlicffi; platform_python_implementation != 'CPython'
-    zstandard; platform_python_implementation == 'CPython' and python_version < "3.14"
+    backports.zstd; platform_python_implementation == 'CPython' and python_version < "3.14"

 [options.packages.find]
 exclude =
diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py
index ec51a46b89c..7717e56f45e 100644
--- a/tests/test_http_parser.py
+++ b/tests/test_http_parser.py
@@ -34,13 +34,13 @@
 except ImportError:
     brotli = None

-if sys.version_info >= (3, 14):
-    import compression.zstd as zstandard  # noqa: I900
-else:
-    try:
-        import zstandard
-    except ImportError:
-        zstandard = None  # type: ignore[assignment]
+try:
+    if sys.version_info >= (3, 14):
+        import compression.zstd as zstandard  # noqa: I900
+    else:
+        import backports.zstd as zstandard
+except ImportError:
+    zstandard = None  # type: ignore[assignment]

 REQUEST_PARSERS = [HttpRequestParserPy]
 RESPONSE_PARSERS = [HttpResponseParserPy]

From bd9caa40723a39ce0602c8826a1d54f27de2cf61 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 10 Oct 2025 10:57:32 +0000
Subject: [PATCH 19/51] Bump backports-zstd from 0.5.0 to 1.0.0 (#11629)

Bumps [backports-zstd](https://github.com/rogdham/backports.zstd) from 0.5.0 to 1.0.0.
Changelog

Sourced from backports-zstd's changelog.

1.0.0 - 2025-10-10

:rocket: Added

  • Update code with CPython 3.14.0 version
  • Update type hints with typeshed aa5202465
  • Update pythoncapi-compat dependency
  • Allow to use libzstd present on the system with the --system-zstd build backend argument
  • Check the libzstd version during build and at runtime

:bug: Fixes

  • Fix import order issue by importing the tarfile and zipfile modules only when needed.
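
The import-order fix uses the standard deferred-import pattern: heavy imports are moved into the function that needs them. A generic sketch of the idea (the helper below is hypothetical, not backports.zstd code):

    def extract_tar(archive: str, dest: str) -> None:
        # tarfile is only imported when an archive is actually processed,
        # so merely importing the package stays cheap.
        import tarfile

        with tarfile.open(archive) as tar:
            tar.extractall(dest)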
Commits
  • e244330 chore: prepare v1.0.0
  • d94dd5e feat: update code from upstream 3.14.0
  • 76f250c chore: update pythoncapi-compat
  • 0d55605 chore: update type hints from upstream
  • b7eb021 feat: support dynamic linking to libzstd
  • 17cc15b chore: fix PyPy build for EOL versions
  • 8aad94f feat: update code from upstream 3.14.0rc3
  • 0794429 Import tarfile/zipfile modules only when needed
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=backports-zstd&package-manager=pip&previous-version=0.5.0&new-version=1.0.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores)
Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`.
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 requirements/base-ft.txt | 4 ++--
 requirements/base.txt | 4 ++--
 requirements/constraints.txt | 8 ++++----
 requirements/dev.txt | 8 ++++----
 requirements/lint.txt | 4 ++--
 requirements/runtime-deps.txt | 4 ++--
 requirements/test-ft.txt | 4 ++--
 requirements/test.txt | 4 ++--
 8 files changed, 20 insertions(+), 20 deletions(-)

diff --git a/requirements/base-ft.txt b/requirements/base-ft.txt
index 8173c3a3599..ec6026187e3 100644
--- a/requirements/base-ft.txt
+++ b/requirements/base-ft.txt
@@ -14,6 +14,8 @@ async-timeout==5.0.1 ; python_version < "3.11"
    # via -r requirements/runtime-deps.in
 attrs==25.4.0
    # via -r requirements/runtime-deps.in
+backports-zstd==1.0.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
+    # via -r requirements/runtime-deps.in
 brotli==1.1.0 ; platform_python_implementation == "CPython"
    # via -r requirements/runtime-deps.in
 cffi==2.0.0
@@ -46,5 +48,3 @@ typing-extensions==4.15.0
    #   multidict
 yarl==1.22.0
    # via -r requirements/runtime-deps.in
-backports.zstd==0.5.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
-    # via -r requirements/runtime-deps.in
diff --git a/requirements/base.txt b/requirements/base.txt
index b3d43773885..0aedbd80e1d 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -14,6 +14,8 @@ async-timeout==5.0.1 ; python_version < "3.11"
    # via -r requirements/runtime-deps.in
 attrs==25.4.0
    # via -r requirements/runtime-deps.in
+backports-zstd==1.0.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
+    # via -r requirements/runtime-deps.in
 brotli==1.1.0 ; platform_python_implementation == "CPython"
    # via -r requirements/runtime-deps.in
 cffi==2.0.0
@@ -48,5 +50,3 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho
    # via -r requirements/base.in
 yarl==1.22.0
    # via -r requirements/runtime-deps.in
-backports.zstd==0.5.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
-    # via -r requirements/runtime-deps.in
diff --git a/requirements/constraints.txt b/requirements/constraints.txt
index 433339af833..afbcf4f3c70 100644
--- a/requirements/constraints.txt
+++ b/requirements/constraints.txt
@@ -26,6 +26,10 @@ attrs==25.4.0
    # via -r requirements/runtime-deps.in
 babel==2.17.0
    # via sphinx
+backports-zstd==1.0.0 ; implementation_name == "cpython"
+    # via
+    #   -r requirements/lint.in
+    #   -r requirements/runtime-deps.in
 blockbuster==1.5.25
    # via
    #   -r requirements/lint.in
@@ -302,10 +306,6 @@ zlib-ng==1.0.0
    # via
    #   -r requirements/lint.in
    #   -r requirements/test-common.in
-backports.zstd==0.5.0 ; implementation_name == "cpython"
-    # via
-    #   -r requirements/lint.in
-    #   -r requirements/runtime-deps.in

 # The following packages are considered to be unsafe in a requirements file:
 pip==25.2
diff --git a/requirements/dev.txt b/requirements/dev.txt
index f864f0c8ad2..ffef21330e5 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -26,6 +26,10 @@ attrs==25.4.0
    # via -r requirements/runtime-deps.in
 babel==2.17.0
    # via sphinx
+backports-zstd==1.0.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
+    # via
+    #   -r requirements/lint.in
+    #   -r requirements/runtime-deps.in
 blockbuster==1.5.25
    # via
    #   -r requirements/lint.in
@@ -293,10 +297,6 @@ zlib-ng==1.0.0
    # via
    #   -r requirements/lint.in
    #   -r requirements/test-common.in
-backports.zstd==0.5.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
-    # via
-    #   -r requirements/lint.in
-    #   -r requirements/runtime-deps.in

 # The following packages are considered to be unsafe in a requirements file:
 pip==25.2
diff --git a/requirements/lint.txt b/requirements/lint.txt
index e2ea2511ca4..8001e283a5a 100644
--- a/requirements/lint.txt
+++ b/requirements/lint.txt
@@ -10,6 +10,8 @@ annotated-types==0.7.0
    # via pydantic
 async-timeout==5.0.1
    # via valkey
+backports-zstd==1.0.0 ; implementation_name == "cpython"
+    # via -r requirements/lint.in
 blockbuster==1.5.25
    # via -r requirements/lint.in
 cffi==2.0.0
@@ -121,5 +123,3 @@ virtualenv==20.34.0
    # via pre-commit
 zlib-ng==1.0.0
    # via -r requirements/lint.in
-backports.zstd==0.5.0 ; implementation_name == "cpython"
-    # via -r requirements/lint.in
diff --git a/requirements/runtime-deps.txt b/requirements/runtime-deps.txt
index aea8b8bccb9..4481c13811c 100644
--- a/requirements/runtime-deps.txt
+++ b/requirements/runtime-deps.txt
@@ -14,6 +14,8 @@ async-timeout==5.0.1 ; python_version < "3.11"
    # via -r requirements/runtime-deps.in
 attrs==25.4.0
    # via -r requirements/runtime-deps.in
+backports-zstd==1.0.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
+    # via -r requirements/runtime-deps.in
 brotli==1.1.0 ; platform_python_implementation == "CPython"
    # via -r requirements/runtime-deps.in
 cffi==2.0.0
@@ -42,5 +44,3 @@ typing-extensions==4.15.0
    #   multidict
 yarl==1.22.0
    # via -r requirements/runtime-deps.in
-backports.zstd==0.5.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
-    # via -r requirements/runtime-deps.in
diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt
index 1c632ee5aaf..a6765b436dd 100644
--- a/requirements/test-ft.txt
+++ b/requirements/test-ft.txt
@@ -16,6 +16,8 @@ async-timeout==5.0.1 ; python_version < "3.11"
    # via -r requirements/runtime-deps.in
 attrs==25.4.0
    # via -r requirements/runtime-deps.in
+backports-zstd==1.0.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
+    # via -r requirements/runtime-deps.in
 blockbuster==1.5.25
    # via -r requirements/test-common.in
 brotli==1.1.0 ; platform_python_implementation == "CPython"
@@ -152,5 +154,3 @@ yarl==1.22.0
    # via -r requirements/runtime-deps.in
 zlib-ng==1.0.0
    # via -r requirements/test-common.in
-backports.zstd==0.5.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
-    # via -r requirements/runtime-deps.in
diff --git a/requirements/test.txt b/requirements/test.txt
index d91804223a8..1371ca40787 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -16,6 +16,8 @@ async-timeout==5.0.1 ; python_version < "3.11"
    # via -r requirements/runtime-deps.in
 attrs==25.4.0
    # via -r requirements/runtime-deps.in
+backports-zstd==1.0.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
+    # via -r requirements/runtime-deps.in
 blockbuster==1.5.25
    # via -r requirements/test-common.in
 brotli==1.1.0 ; platform_python_implementation == "CPython"
@@ -154,5 +156,3 @@ yarl==1.22.0
    # via -r requirements/runtime-deps.in
 zlib-ng==1.0.0
    # via -r requirements/test-common.in
-backports.zstd==0.5.0 ; platform_python_implementation == "CPython" and python_version < "3.14"
-    # via -r requirements/runtime-deps.in

From 4cc90ee5377df338d1b4667d8cd6d5a32623558c Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 10 Oct 2025 11:13:50 +0000
Subject: [PATCH 20/51] Bump virtualenv from 20.34.0 to 20.35.1 (#11630)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Bumps [virtualenv](https://github.com/pypa/virtualenv) from 20.34.0 to 20.35.1.
Release notes

Sourced from virtualenv's releases.

20.35.1

What's Changed

Full Changelog: https://github.com/pypa/virtualenv/compare/20.34.0...20.35.1

20.35.0

What's Changed

Full Changelog: https://github.com/pypa/virtualenv/compare/20.34.0...20.35.0

Changelog

Sourced from virtualenv's changelog.

v20.35.1 (2025-10-09)

Bugfixes - 20.35.1

- Patch get_interpreter to handle missing cache and app_data - by :user:`esafak` (:issue:`2972`)
- Fix backwards incompatible changes to ``PythonInfo`` - by :user:`gaborbernat`. (:issue:`2975`)

v20.35.0 (2025-10-08)

Features - 20.35.0

- Add AppData and Cache protocols to discovery for decoupling - by :user:`esafak`. (:issue:`2074`)
- Ensure python3.exe and python3 on Windows for Python 3 - by :user:`esafak`. (:issue:`2774`)

Bugfixes - 20.35.0

- Replaced direct references to tcl/tk library paths with getattr - by :user:`esafak` (:issue:`2944`)
- Restore absolute import of fs_is_case_sensitive - by :user:`esafak`. (:issue:`2955`)
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=virtualenv&package-manager=pip&previous-version=20.34.0&new-version=20.35.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
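virtualenv is only pinned here as a transitive dependency of pre-commit (see the `# via` annotations in the diffs below). For readers unfamiliar with it, a minimal sketch of its documented programmatic entry point, `cli_run` (the `demo-venv` path is a placeholder):

```python
from virtualenv import cli_run

# Equivalent to running `virtualenv demo-venv` on the command line.
session = cli_run(["demo-venv"])

# The returned Session exposes the creator, whose `dest` is the path
# of the freshly created environment.
print(session.creator.dest)
```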
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index afbcf4f3c70..5b082abd22b 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -294,7 +294,7 @@ uvloop==0.21.0 ; platform_system != "Windows" # -r requirements/lint.in valkey==6.1.1 # via -r requirements/lint.in -virtualenv==20.34.0 +virtualenv==20.35.1 # via pre-commit wait-for-it==2.3.0 # via -r requirements/test-common.in diff --git a/requirements/dev.txt b/requirements/dev.txt index ffef21330e5..87bba281e79 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -285,7 +285,7 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho # -r requirements/lint.in valkey==6.1.1 # via -r requirements/lint.in -virtualenv==20.34.0 +virtualenv==20.35.1 # via pre-commit wait-for-it==2.3.0 # via -r requirements/test-common.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 8001e283a5a..f0c4fd12142 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -119,7 +119,7 @@ uvloop==0.21.0 ; platform_system != "Windows" # via -r requirements/lint.in valkey==6.1.1 # via -r requirements/lint.in -virtualenv==20.34.0 +virtualenv==20.35.1 # via pre-commit zlib-ng==1.0.0 # via -r requirements/lint.in From b31c0bcb69819436246fa0d4a6c1ce099a9582cf Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 10 Oct 2025 11:22:51 +0000 Subject: [PATCH 21/51] Bump rich from 14.1.0 to 14.2.0 (#11631) Bumps [rich](https://github.com/Textualize/rich) from 14.1.0 to 14.2.0.
Release notes

Sourced from rich's releases.

The Easy as Pi release

This release bumps Python compatibility to the just-released Python 3.14.

[14.2.0] - 2025-10-09

Changed

Changelog

Sourced from rich's changelog.

[14.2.0] - 2025-10-09

Changed

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=rich&package-manager=pip&previous-version=14.1.0&new-version=14.2.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
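rich lands in these lockfiles via pytest-codspeed (see the `# via` comments in the diffs below); the release itself mainly extends Python 3.14 compatibility. A hedged usage sketch:

```python
from rich.console import Console

# Console.print understands bbcode-style markup for styled terminal output.
console = Console()
console.print("[bold green]rich 14.2.0[/] now declares Python 3.14 support")
```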
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 5b082abd22b..a68d5f1095c 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -216,7 +216,7 @@ requests==2.32.5 # cherry-picker # sphinx # sphinxcontrib-spelling -rich==14.1.0 +rich==14.2.0 # via pytest-codspeed setuptools-git==1.2 # via -r requirements/test-common.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 87bba281e79..c69acd3d432 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -210,7 +210,7 @@ requests==2.32.5 # via # cherry-picker # sphinx -rich==14.1.0 +rich==14.2.0 # via pytest-codspeed setuptools-git==1.2 # via -r requirements/test-common.in diff --git a/requirements/lint.txt b/requirements/lint.txt index f0c4fd12142..6440e6df3a4 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -90,7 +90,7 @@ python-on-whales==0.78.0 # via -r requirements/lint.in pyyaml==6.0.3 # via pre-commit -rich==14.1.0 +rich==14.2.0 # via pytest-codspeed six==1.17.0 # via python-dateutil diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 9e9cd9bc1ac..31ffb21d9d9 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -87,7 +87,7 @@ re-assert==1.1.0 # via -r requirements/test-common.in regex==2025.9.18 # via re-assert -rich==14.1.0 +rich==14.2.0 # via pytest-codspeed setuptools-git==1.2 # via -r requirements/test-common.in diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index a6765b436dd..5cbbdd81072 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -122,7 +122,7 @@ re-assert==1.1.0 # via -r requirements/test-common.in regex==2025.9.18 # via re-assert -rich==14.1.0 +rich==14.2.0 # via pytest-codspeed setuptools-git==1.2 # via -r requirements/test-common.in diff --git a/requirements/test.txt b/requirements/test.txt index 1371ca40787..0cbb107549c 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -122,7 +122,7 @@ re-assert==1.1.0 # via -r requirements/test-common.in regex==2025.9.18 # via re-assert -rich==14.1.0 +rich==14.2.0 # via pytest-codspeed setuptools-git==1.2 # via -r requirements/test-common.in From 0ce674901bb5fa8595e82e65cc60e16cd061cb4b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Oct 2025 11:54:04 +0000 Subject: [PATCH 22/51] Bump pypa/cibuildwheel from 3.2.0 to 3.2.1 (#11639) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pypa/cibuildwheel](https://github.com/pypa/cibuildwheel) from 3.2.0 to 3.2.1.
Release notes

Sourced from pypa/cibuildwheel's releases.

v3.2.1

  • 🛠 Update to CPython 3.14.0 final (#2614)
  • 🐛 Fix the default MACOSX_DEPLOYMENT_TARGET on Python 3.14 (#2613)
  • 📚 Docs improvements (#2617)
Changelog

Sourced from pypa/cibuildwheel's changelog.

v3.2.1

12 October 2025

  • 🛠 Update to CPython 3.14.0 final (#2614)
  • 🐛 Fix the default MACOSX_DEPLOYMENT_TARGET on Python 3.14 (#2613)
  • 📚 Docs improvements (#2617)
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pypa/cibuildwheel&package-manager=github_actions&previous-version=3.2.0&new-version=3.2.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 1e1eb60a77c..300b605e997 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -426,7 +426,7 @@ jobs: run: | make cythonize - name: Build wheels - uses: pypa/cibuildwheel@v3.2.0 + uses: pypa/cibuildwheel@v3.2.1 env: CIBW_SKIP: pp* ${{ matrix.musl == 'musllinux' && '*manylinux*' || '*musllinux*' }} CIBW_ARCHS_MACOS: x86_64 arm64 universal2 From fa112797d746be145939c215924433ac22470412 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 13 Oct 2025 12:02:29 +0000 Subject: [PATCH 23/51] Bump virtualenv from 20.35.1 to 20.35.3 (#11640) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [virtualenv](https://github.com/pypa/virtualenv) from 20.35.1 to 20.35.3.
Release notes

Sourced from virtualenv's releases.

20.35.3

What's Changed

Full Changelog: https://github.com/pypa/virtualenv/compare/20.35.1...20.35.3

20.35.2

What's Changed

Full Changelog: https://github.com/pypa/virtualenv/compare/20.35.1...20.35.2

Changelog

Sourced from virtualenv's changelog.

v20.35.3 (2025-10-10)

Bugfixes - 20.35.3

- Accept RuntimeError in `test_too_many_open_files`, by :user:`esafak` (:issue:`2935`)

v20.35.2 (2025-10-10)

Bugfixes - 20.35.2

- Revert out changes related to the extraction of the discovery module - by :user:`gaborbernat`. (:issue:`2978`)
Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=virtualenv&package-manager=pip&previous-version=20.35.1&new-version=20.35.3)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index a68d5f1095c..e9a9284e741 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -294,7 +294,7 @@ uvloop==0.21.0 ; platform_system != "Windows" # -r requirements/lint.in valkey==6.1.1 # via -r requirements/lint.in -virtualenv==20.35.1 +virtualenv==20.35.3 # via pre-commit wait-for-it==2.3.0 # via -r requirements/test-common.in diff --git a/requirements/dev.txt b/requirements/dev.txt index c69acd3d432..b0e34671b4a 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -285,7 +285,7 @@ uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpytho # -r requirements/lint.in valkey==6.1.1 # via -r requirements/lint.in -virtualenv==20.35.1 +virtualenv==20.35.3 # via pre-commit wait-for-it==2.3.0 # via -r requirements/test-common.in diff --git a/requirements/lint.txt b/requirements/lint.txt index 6440e6df3a4..541e03c469a 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -119,7 +119,7 @@ uvloop==0.21.0 ; platform_system != "Windows" # via -r requirements/lint.in valkey==6.1.1 # via -r requirements/lint.in -virtualenv==20.35.1 +virtualenv==20.35.3 # via pre-commit zlib-ng==1.0.0 # via -r requirements/lint.in From 040dac35254224d03652216538f49970335c0de9 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Mon, 13 Oct 2025 17:05:52 +0200 Subject: [PATCH 24/51] [PR #9951 backport][3.13] Move metadata to pyproject.toml (#11641) This is a backport of PR https://github.com/aio-libs/aiohttp/pull/9951 as merged into master (https://github.com/aio-libs/aiohttp/commit/6b5d8e7f6b92fe944930b12e16d4884d01d14323). The metadata changes: ```diff ... -Maintainer: aiohttp team -Maintainer-email: team@aiohttp.org +Maintainer-email: aiohttp team License: Apache-2.0 AND MIT -Home-page: https://github.com/aio-libs/aiohttp +Project-URL: Homepage, https://github.com/aio-libs/aiohttp ... ``` Modified the backport to include Python `3.9` in the classifier list again as well as updated `requires-python` to `>= 3.9`. --------- Co-authored-by: Sviatoslav Sydorenko --- CHANGES/9951.packaging.rst | 2 + MANIFEST.in | 3 +- pyproject.toml | 75 +++++++++++++++++++++++++++++++++++--- setup.cfg | 68 ---------------------------------- 4 files changed, 72 insertions(+), 76 deletions(-) create mode 100644 CHANGES/9951.packaging.rst diff --git a/CHANGES/9951.packaging.rst b/CHANGES/9951.packaging.rst new file mode 100644 index 00000000000..5f567d23fac --- /dev/null +++ b/CHANGES/9951.packaging.rst @@ -0,0 +1,2 @@ +Moved core packaging metadata from :file:`setup.cfg` to :file:`pyproject.toml` per :pep:`621` +-- by :user:`cdce8p`. 
diff --git a/MANIFEST.in b/MANIFEST.in index 64cee139a1f..ea5d39d4722 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -9,8 +9,7 @@ graft examples graft tests graft tools graft requirements -recursive-include vendor * -global-include aiohttp *.pyi +graft vendor global-exclude *.pyc global-exclude *.pyd global-exclude *.so diff --git a/pyproject.toml b/pyproject.toml index df8b8465348..1cbfe81138e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,10 +1,78 @@ [build-system] requires = [ "pkgconfig", - "setuptools >= 46.4.0", + # setuptools >= 67.0 required for Python 3.12+ support + # Next step should be >= 77.0 for PEP 639 support + # Don't bump too early to give distributors time to update + # their setuptools version. + "setuptools >= 67.0", ] build-backend = "setuptools.build_meta" +[project] +name = "aiohttp" +# TODO: Update to just 'license = "..."' once setuptools is bumped to >=77 +license = {text = "Apache-2.0 AND MIT"} +description = "Async http client/server framework (asyncio)" +readme = "README.rst" +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Framework :: AsyncIO", + "Intended Audience :: Developers", + "Operating System :: POSIX", + "Operating System :: MacOS :: MacOS X", + "Operating System :: Microsoft :: Windows", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", + "Topic :: Internet :: WWW/HTTP", +] +requires-python = ">= 3.9" +dynamic = [ + "dependencies", + "optional-dependencies", + "version", +] + +[[project.maintainers]] +name = "aiohttp team" +email = "team@aiohttp.org" + +[project.urls] +"Homepage" = "https://github.com/aio-libs/aiohttp" +"Chat: Matrix" = "https://matrix.to/#/#aio-libs:matrix.org" +"Chat: Matrix Space" = "https://matrix.to/#/#aio-libs-space:matrix.org" +"CI: GitHub Actions" = "https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI" +"Coverage: codecov" = "https://codecov.io/github/aio-libs/aiohttp" +"Docs: Changelog" = "https://docs.aiohttp.org/en/stable/changes.html" +"Docs: RTD" = "https://docs.aiohttp.org" +"GitHub: issues" = "https://github.com/aio-libs/aiohttp/issues" +"GitHub: repo" = "https://github.com/aio-libs/aiohttp" + +[tool.setuptools] +license-files = [ + # TODO: Use 'project.license-files' instead once setuptools is bumped to >=77 + "LICENSE.txt", + "vendor/llhttp/LICENSE", +] + +[tool.setuptools.dynamic] +version = {attr = "aiohttp.__version__"} + +[tool.setuptools.packages.find] +include = [ + "aiohttp", + "aiohttp.*", +] + +[tool.setuptools.exclude-package-data] +"*" = ["*.c", "*.h"] + [tool.towncrier] package = "aiohttp" filename = "CHANGES.rst" @@ -88,8 +156,3 @@ ignore-words-list = 'te,ue' # TODO(3.13): Remove aiohttp.helpers once https://github.com/python/cpython/pull/106771 # is available in all supported cpython versions exclude-modules = "(^aiohttp\\.helpers)" - -[tool.black] -# TODO: Remove when project metadata is moved here. -# Black can read the value from [project.requires-python]. 
-target-version = ["py39", "py310", "py311", "py312"] diff --git a/setup.cfg b/setup.cfg index 490e6030e8b..a78ae609f1b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,56 +1,4 @@ -[metadata] -name = aiohttp -version = attr: aiohttp.__version__ -url = https://github.com/aio-libs/aiohttp -project_urls = - Chat: Matrix = https://matrix.to/#/#aio-libs:matrix.org - Chat: Matrix Space = https://matrix.to/#/#aio-libs-space:matrix.org - CI: GitHub Actions = https://github.com/aio-libs/aiohttp/actions?query=workflow%%3ACI - Coverage: codecov = https://codecov.io/github/aio-libs/aiohttp - Docs: Changelog = https://docs.aiohttp.org/en/stable/changes.html - Docs: RTD = https://docs.aiohttp.org - GitHub: issues = https://github.com/aio-libs/aiohttp/issues - GitHub: repo = https://github.com/aio-libs/aiohttp -description = Async http client/server framework (asyncio) -long_description = file: README.rst -long_description_content_type = text/x-rst -maintainer = aiohttp team -maintainer_email = team@aiohttp.org -license = Apache-2.0 AND MIT -license_files = - LICENSE.txt - vendor/llhttp/LICENSE -classifiers = - Development Status :: 5 - Production/Stable - - Framework :: AsyncIO - - Intended Audience :: Developers - - Operating System :: POSIX - Operating System :: MacOS :: MacOS X - Operating System :: Microsoft :: Windows - - Programming Language :: Python - Programming Language :: Python :: 3 - Programming Language :: Python :: 3.9 - Programming Language :: Python :: 3.10 - Programming Language :: Python :: 3.11 - Programming Language :: Python :: 3.12 - Programming Language :: Python :: 3.13 - Programming Language :: Python :: 3.14 - - Topic :: Internet :: WWW/HTTP - [options] -python_requires = >=3.9 -packages = - aiohttp - aiohttp._websocket -# https://setuptools.readthedocs.io/en/latest/setuptools.html#setting-the-zip-safe-flag -zip_safe = False -include_package_data = True - install_requires = aiohappyeyeballs >= 2.5.0 aiosignal >= 1.4.0 @@ -61,11 +9,6 @@ install_requires = propcache >= 0.2.0 yarl >= 1.17.0, < 2.0 -[options.exclude_package_data] -* = - *.c - *.h - [options.extras_require] speedups = aiodns >= 3.3.0 @@ -73,17 +16,6 @@ speedups = brotlicffi; platform_python_implementation != 'CPython' backports.zstd; platform_python_implementation == 'CPython' and python_version < "3.14" -[options.packages.find] -exclude = - examples - -[options.package_data] -# Ref: -# https://setuptools.readthedocs.io/en/latest/setuptools.html#options -# (see notes for the asterisk/`*` meaning) -* = - *.so - [pep8] max-line-length=79 From dc04bbc67b05bd1703519ef8be1541ff3d21b126 Mon Sep 17 00:00:00 2001 From: Marc Mueller <30130371+cdce8p@users.noreply.github.com> Date: Mon, 13 Oct 2025 17:11:52 +0200 Subject: [PATCH 25/51] [PR #9951 backport][3.14] Move metadata to pyproject.toml (#11642) This is a backport of PR https://github.com/aio-libs/aiohttp/pull/9951 as merged into master (https://github.com/aio-libs/aiohttp/commit/6b5d8e7f6b92fe944930b12e16d4884d01d14323). The metadata changes: ```diff ... -Maintainer: aiohttp team -Maintainer-email: team@aiohttp.org +Maintainer-email: aiohttp team License: Apache-2.0 AND MIT -Home-page: https://github.com/aio-libs/aiohttp +Project-URL: Homepage, https://github.com/aio-libs/aiohttp ... 
``` Co-authored-by: Sviatoslav Sydorenko --- CHANGES/9951.packaging.rst | 2 ++ MANIFEST.in | 3 +- pyproject.toml | 74 ++++++++++++++++++++++++++++++++++---- setup.cfg | 67 ---------------------------------- 4 files changed, 71 insertions(+), 75 deletions(-) create mode 100644 CHANGES/9951.packaging.rst diff --git a/CHANGES/9951.packaging.rst b/CHANGES/9951.packaging.rst new file mode 100644 index 00000000000..5f567d23fac --- /dev/null +++ b/CHANGES/9951.packaging.rst @@ -0,0 +1,2 @@ +Moved core packaging metadata from :file:`setup.cfg` to :file:`pyproject.toml` per :pep:`621` +-- by :user:`cdce8p`. diff --git a/MANIFEST.in b/MANIFEST.in index 64cee139a1f..ea5d39d4722 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -9,8 +9,7 @@ graft examples graft tests graft tools graft requirements -recursive-include vendor * -global-include aiohttp *.pyi +graft vendor global-exclude *.pyc global-exclude *.pyd global-exclude *.so diff --git a/pyproject.toml b/pyproject.toml index 81a3009e445..fa6da9c82dd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,10 +1,77 @@ [build-system] requires = [ "pkgconfig", - "setuptools >= 46.4.0", + # setuptools >= 67.0 required for Python 3.12+ support + # Next step should be >= 77.0 for PEP 639 support + # Don't bump too early to give distributors time to update + # their setuptools version. + "setuptools >= 67.0", ] build-backend = "setuptools.build_meta" +[project] +name = "aiohttp" +# TODO: Update to just 'license = "..."' once setuptools is bumped to >=77 +license = {text = "Apache-2.0 AND MIT"} +description = "Async http client/server framework (asyncio)" +readme = "README.rst" +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Framework :: AsyncIO", + "Intended Audience :: Developers", + "Operating System :: POSIX", + "Operating System :: MacOS :: MacOS X", + "Operating System :: Microsoft :: Windows", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", + "Topic :: Internet :: WWW/HTTP", +] +requires-python = ">= 3.10" +dynamic = [ + "dependencies", + "optional-dependencies", + "version", +] + +[[project.maintainers]] +name = "aiohttp team" +email = "team@aiohttp.org" + +[project.urls] +"Homepage" = "https://github.com/aio-libs/aiohttp" +"Chat: Matrix" = "https://matrix.to/#/#aio-libs:matrix.org" +"Chat: Matrix Space" = "https://matrix.to/#/#aio-libs-space:matrix.org" +"CI: GitHub Actions" = "https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI" +"Coverage: codecov" = "https://codecov.io/github/aio-libs/aiohttp" +"Docs: Changelog" = "https://docs.aiohttp.org/en/stable/changes.html" +"Docs: RTD" = "https://docs.aiohttp.org" +"GitHub: issues" = "https://github.com/aio-libs/aiohttp/issues" +"GitHub: repo" = "https://github.com/aio-libs/aiohttp" + +[tool.setuptools] +license-files = [ + # TODO: Use 'project.license-files' instead once setuptools is bumped to >=77 + "LICENSE.txt", + "vendor/llhttp/LICENSE", +] + +[tool.setuptools.dynamic] +version = {attr = "aiohttp.__version__"} + +[tool.setuptools.packages.find] +include = [ + "aiohttp", + "aiohttp.*", +] + +[tool.setuptools.exclude-package-data] +"*" = ["*.c", "*.h"] + [tool.towncrier] package = "aiohttp" filename = "CHANGES.rst" @@ -88,8 +155,3 @@ ignore-words-list = 'te,ue' # TODO(3.13): Remove aiohttp.helpers once 
https://github.com/python/cpython/pull/106771 # is available in all supported cpython versions exclude-modules = "(^aiohttp\\.helpers)" - -[tool.black] -# TODO: Remove when project metadata is moved here. -# Black can read the value from [project.requires-python]. -target-version = ["py310", "py311", "py312"] diff --git a/setup.cfg b/setup.cfg index e54010f82a1..4e49e33f304 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,55 +1,4 @@ -[metadata] -name = aiohttp -version = attr: aiohttp.__version__ -url = https://github.com/aio-libs/aiohttp -project_urls = - Chat: Matrix = https://matrix.to/#/#aio-libs:matrix.org - Chat: Matrix Space = https://matrix.to/#/#aio-libs-space:matrix.org - CI: GitHub Actions = https://github.com/aio-libs/aiohttp/actions?query=workflow%%3ACI - Coverage: codecov = https://codecov.io/github/aio-libs/aiohttp - Docs: Changelog = https://docs.aiohttp.org/en/stable/changes.html - Docs: RTD = https://docs.aiohttp.org - GitHub: issues = https://github.com/aio-libs/aiohttp/issues - GitHub: repo = https://github.com/aio-libs/aiohttp -description = Async http client/server framework (asyncio) -long_description = file: README.rst -long_description_content_type = text/x-rst -maintainer = aiohttp team -maintainer_email = team@aiohttp.org -license = Apache-2.0 AND MIT -license_files = - LICENSE.txt - vendor/llhttp/LICENSE -classifiers = - Development Status :: 5 - Production/Stable - - Framework :: AsyncIO - - Intended Audience :: Developers - - Operating System :: POSIX - Operating System :: MacOS :: MacOS X - Operating System :: Microsoft :: Windows - - Programming Language :: Python - Programming Language :: Python :: 3 - Programming Language :: Python :: 3.10 - Programming Language :: Python :: 3.11 - Programming Language :: Python :: 3.12 - Programming Language :: Python :: 3.13 - Programming Language :: Python :: 3.14 - - Topic :: Internet :: WWW/HTTP - [options] -python_requires = >=3.10 -packages = - aiohttp - aiohttp._websocket -# https://setuptools.readthedocs.io/en/latest/setuptools.html#setting-the-zip-safe-flag -zip_safe = False -include_package_data = True - install_requires = aiohappyeyeballs >= 2.5.0 aiosignal >= 1.4.0 @@ -60,11 +9,6 @@ install_requires = propcache >= 0.2.0 yarl >= 1.17.0, < 2.0 -[options.exclude_package_data] -* = - *.c - *.h - [options.extras_require] speedups = aiodns >= 3.3.0 @@ -72,17 +16,6 @@ speedups = brotlicffi; platform_python_implementation != 'CPython' backports.zstd; platform_python_implementation == 'CPython' and python_version < "3.14" -[options.packages.find] -exclude = - examples - -[options.package_data] -# Ref: -# https://setuptools.readthedocs.io/en/latest/setuptools.html#options -# (see notes for the asterisk/`*` meaning) -* = - *.so - [pep8] max-line-length=79 From e6d937325dcb0098f5b5ed6ec31e83085fa4d42f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 14 Oct 2025 10:59:58 +0000 Subject: [PATCH 26/51] Bump actions/setup-node from 5 to 6 (#11648) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [actions/setup-node](https://github.com/actions/setup-node) from 5 to 6.
Release notes

Sourced from actions/setup-node's releases.

v6.0.0

What's Changed

Breaking Changes

Dependency Upgrades

Full Changelog: https://github.com/actions/setup-node/compare/v5...v6.0.0

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=actions/setup-node&package-manager=github_actions&previous-version=5&new-version=6)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/ci-cd.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 300b605e997..ec834c6e843 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -103,7 +103,7 @@ jobs: path: vendor/llhttp/build - name: Setup NodeJS if: steps.cache.outputs.cache-hit != 'true' - uses: actions/setup-node@v5 + uses: actions/setup-node@v6 with: node-version: 18 - name: Generate llhttp sources From 751c303f0c323b88dac4c0be6b6e975cd04a6a3b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 14 Oct 2025 11:02:18 +0000 Subject: [PATCH 27/51] Bump pydantic from 2.12.0 to 2.12.1 (#11649) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.12.0 to 2.12.1.
Release notes

Sourced from pydantic's releases.

v2.12.1 (2025-10-13)

v2.12.1 (2025-10-13)

GitHub release

What's Changed

This is the first 2.12 patch release, addressing most (but not all yet) regressions from the initial 2.12.0 release.

Fixes

New Contributors

Full Changelog: https://github.com/pydantic/pydantic/compare/v2.12.0...v2.12.1

Changelog

Sourced from pydantic's changelog.

v2.12.1 (2025-10-13)

GitHub release

What's Changed

This is the first 2.12 patch release, addressing most (but not all yet) regressions from the initial 2.12.0 release.

Fixes

New Contributors

Commits
  • b4076c6 Prepare release 2.12.1
  • b67f072 Bump pydantic-core to v2.41.3
  • 529f7dd Fix error message for invalid validator signatures
  • 445fa79 Backport V1 runtime warning
  • b3dba9b Make sure None is converted as NoneType in Python 3.14
  • 1e8c41e Do not evaluate annotations when inspecting validators and serializers
  • e2a199f Upgrade dependencies for 3.14
  • 79353e6 Fix spelling in model_dump() docstring
  • aa6b6cd Fix typo in experimental.md documentation
  • See full diff in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=pydantic&package-manager=pip&previous-version=2.12.0&new-version=2.12.1)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
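pydantic is pulled in through python-on-whales (per the `# via` annotations below). As a reminder of the v2 surface this patch release stabilizes, a minimal sketch (the `Release` model is hypothetical):

```python
from pydantic import BaseModel

class Release(BaseModel):
    name: str
    version: str
    yanked: bool = False

# model_dump() is the v2 serialization entry point touched in the
# commit list above (docstring spelling fix).
print(Release(name="pydantic", version="2.12.1").model_dump())
```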
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 4 ++-- requirements/dev.txt | 4 ++-- requirements/lint.txt | 4 ++-- requirements/test-common.txt | 4 ++-- requirements/test-ft.txt | 4 ++-- requirements/test.txt | 4 ++-- 6 files changed, 12 insertions(+), 12 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e9a9284e741..e08535deb09 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -160,9 +160,9 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -pydantic==2.12.0 +pydantic==2.12.1 # via python-on-whales -pydantic-core==2.41.1 +pydantic-core==2.41.3 # via pydantic pyenchant==3.3.0 # via sphinxcontrib-spelling diff --git a/requirements/dev.txt b/requirements/dev.txt index b0e34671b4a..12e23286d3b 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -157,9 +157,9 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -pydantic==2.12.0 +pydantic==2.12.1 # via python-on-whales -pydantic-core==2.41.1 +pydantic-core==2.41.3 # via pydantic pygments==2.19.2 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index 541e03c469a..c65871b2b26 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -67,9 +67,9 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -pydantic==2.12.0 +pydantic==2.12.1 # via python-on-whales -pydantic-core==2.41.1 +pydantic-core==2.41.3 # via pydantic pygments==2.19.2 # via diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 31ffb21d9d9..6189c205496 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -56,9 +56,9 @@ proxy-py==2.4.10 # via -r requirements/test-common.in pycparser==2.23 # via cffi -pydantic==2.12.0 +pydantic==2.12.1 # via python-on-whales -pydantic-core==2.41.1 +pydantic-core==2.41.3 # via pydantic pygments==2.19.2 # via diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 5cbbdd81072..5097d946c86 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -91,9 +91,9 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -pydantic==2.12.0 +pydantic==2.12.1 # via python-on-whales -pydantic-core==2.41.1 +pydantic-core==2.41.3 # via pydantic pygments==2.19.2 # via diff --git a/requirements/test.txt b/requirements/test.txt index 0cbb107549c..f3fc2f01683 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -91,9 +91,9 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -pydantic==2.12.0 +pydantic==2.12.1 # via python-on-whales -pydantic-core==2.41.1 +pydantic-core==2.41.3 # via pydantic pygments==2.19.2 # via From 2fd4b40457790c094568a66984d20fe0330f43d3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 14 Oct 2025 11:09:56 +0000 Subject: [PATCH 28/51] Bump charset-normalizer from 3.4.3 to 3.4.4 (#11650) Bumps [charset-normalizer](https://github.com/jawah/charset_normalizer) from 3.4.3 to 3.4.4.
Release notes

Sourced from charset-normalizer's releases.

Version 3.4.4

3.4.4 (2025-10-13)

Changed

  • Bound setuptools to a specific constraint setuptools>=68,<=81.
  • Raised upper bound of mypyc for the optional pre-built extension to v1.18.2

Removed

  • setuptools-scm as a build dependency.

Misc

  • Enforced hashes in dev-requirements.txt and created ci-requirements.txt for security purposes.
  • Additional pre-built wheels for riscv64, s390x, and armv7l architectures.
  • Restore multiple.intoto.jsonl in GitHub releases in addition to individual attestation file per wheel.
Changelog

Sourced from charset-normalizer's changelog.

3.4.4 (2025-10-13)

Changed

  • Bound setuptools to a specific constraint setuptools>=68,<=81.
  • Raised upper bound of mypyc for the optional pre-built extension to v1.18.2

Removed

  • setuptools-scm as a build dependency.

Misc

  • Enforced hashes in dev-requirements.txt and created ci-requirements.txt for security purposes.
  • Additional pre-built wheels for riscv64, s390x, and armv7l architectures.
  • Restore multiple.intoto.jsonl in GitHub releases in addition to individual attestation file per wheel.
Commits
  • b30ffdc :wrench: fix checksum step in cd.yml
  • d3fbfcf :wrench: fix cd.yml
  • dafbb95 Release 3.4.4 (#658)
  • 1f18ffa :arrow_up: raise mypy upper bound to 1.18.2
  • ef4ac69 Merge branch 'release-3.4.4' of github.com:jawah/charset_normalizer into rele...
  • 4b35dda :pencil: write changelog for 3.4.4
  • 0ec6452 :wrench: update cd.yml workflow (add riscv64, s390x and armv7l)
  • f341ede :arrow_up: upgrade dependencies (dev, ci)
  • a308841 :pencil: write changelog for 3.4.4
  • 9c906da :wrench: update cd.yml workflow (add riscv64, s390x and armv7l)
  • Additional commits viewable in compare view

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=charset-normalizer&package-manager=pip&previous-version=3.4.3&new-version=3.4.4)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
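charset-normalizer is a transitive dependency of requests in these lockfiles. A hedged sketch of its detection API, which this packaging-focused release leaves unchanged:

```python
from charset_normalizer import from_bytes

payload = "café".encode("latin-1")  # bytes in a non-UTF-8 encoding

# from_bytes() returns candidate matches; best() picks the most
# plausible one, or None if nothing fits.
best = from_bytes(payload).best()
if best is not None:
    print(best.encoding, str(best))
```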
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/doc-spelling.txt | 2 +- requirements/doc.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e08535deb09..76688a35a8d 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -47,7 +47,7 @@ cffi==2.0.0 # pytest-codspeed cfgv==3.4.0 # via pre-commit -charset-normalizer==3.4.3 +charset-normalizer==3.4.4 # via requests cherry-picker==2.6.0 # via -r requirements/dev.in diff --git a/requirements/dev.txt b/requirements/dev.txt index 12e23286d3b..433be275383 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -47,7 +47,7 @@ cffi==2.0.0 # pytest-codspeed cfgv==3.4.0 # via pre-commit -charset-normalizer==3.4.3 +charset-normalizer==3.4.4 # via requests cherry-picker==2.6.0 # via -r requirements/dev.in diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt index 95bc1d18964..730accfac89 100644 --- a/requirements/doc-spelling.txt +++ b/requirements/doc-spelling.txt @@ -12,7 +12,7 @@ babel==2.17.0 # via sphinx certifi==2025.10.5 # via requests -charset-normalizer==3.4.3 +charset-normalizer==3.4.4 # via requests click==8.3.0 # via towncrier diff --git a/requirements/doc.txt b/requirements/doc.txt index 9369ca3daf3..d69dd77eafc 100644 --- a/requirements/doc.txt +++ b/requirements/doc.txt @@ -12,7 +12,7 @@ babel==2.17.0 # via sphinx certifi==2025.10.5 # via requests -charset-normalizer==3.4.3 +charset-normalizer==3.4.4 # via requests click==8.3.0 # via towncrier From c7b2015a782367ddf6cee050d67e5fc971d48979 Mon Sep 17 00:00:00 2001 From: Cycloctane Date: Wed, 15 Oct 2025 06:43:04 +0800 Subject: [PATCH 29/51] [PR #11633/b1bd65d backport][3.14] Make AppRunner's configuration options available in run_app() (#11653) (cherry picked from commit b1bd65dbd191b318325c4b87d626bb5d7967cda7) --- CHANGES/11633.feature.rst | 2 ++ aiohttp/web.py | 21 ++++----------------- docs/web_reference.rst | 6 +++++- tests/test_run_app.py | 25 ++++++++++++++++--------- 4 files changed, 27 insertions(+), 27 deletions(-) create mode 100644 CHANGES/11633.feature.rst diff --git a/CHANGES/11633.feature.rst b/CHANGES/11633.feature.rst new file mode 100644 index 00000000000..374d15a2acd --- /dev/null +++ b/CHANGES/11633.feature.rst @@ -0,0 +1,2 @@ +Make configuration options in ``AppRunner`` also available in ``run_app()`` +-- by :user:`Cycloctane`. 
diff --git a/aiohttp/web.py b/aiohttp/web.py index a77447d3a2e..241207ed709 100644 --- a/aiohttp/web.py +++ b/aiohttp/web.py @@ -297,18 +297,12 @@ async def _run_app( port: int | None = None, path: PathLike | TypingIterable[PathLike] | None = None, sock: socket.socket | TypingIterable[socket.socket] | None = None, - shutdown_timeout: float = 60.0, - keepalive_timeout: float = 75.0, ssl_context: SSLContext | None = None, print: Callable[..., None] | None = print, backlog: int = 128, - access_log_class: type[AbstractAccessLogger] = AccessLogger, - access_log_format: str = AccessLogger.LOG_FORMAT, - access_log: logging.Logger | None = access_logger, - handle_signals: bool = True, reuse_address: bool | None = None, reuse_port: bool | None = None, - handler_cancellation: bool = False, + **kwargs: Any, # TODO(PY311): Use Unpack ) -> None: # An internal function to actually do all dirty job for application running if asyncio.iscoroutine(app): @@ -316,16 +310,7 @@ async def _run_app( app = cast(Application, app) - runner = AppRunner( - app, - handle_signals=handle_signals, - access_log_class=access_log_class, - access_log_format=access_log_format, - access_log=access_log, - keepalive_timeout=keepalive_timeout, - shutdown_timeout=shutdown_timeout, - handler_cancellation=handler_cancellation, - ) + runner = AppRunner(app, **kwargs) await runner.setup() @@ -472,6 +457,7 @@ def run_app( reuse_port: bool | None = None, handler_cancellation: bool = False, loop: asyncio.AbstractEventLoop | None = None, + **kwargs: Any, ) -> None: """Run an app locally""" if loop is None: @@ -503,6 +489,7 @@ def run_app( reuse_address=reuse_address, reuse_port=reuse_port, handler_cancellation=handler_cancellation, + **kwargs, ) ) diff --git a/docs/web_reference.rst b/docs/web_reference.rst index 2d1882da17c..4400765b072 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -3060,7 +3060,8 @@ Utilities handle_signals=True, \ reuse_address=None, \ reuse_port=None, \ - handler_cancellation=False) + handler_cancellation=False, \ + **kwargs) A high-level function for running an application, serving it until keyboard interrupt and performing a @@ -3170,6 +3171,9 @@ Utilities scalability is a concern. :ref:`aiohttp-web-peer-disconnection` + :param kwargs: additional named parameters to pass into + :class:`AppRunner` constructor. + .. versionadded:: 3.0 Support *access_log_class* parameter. 
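To make the backported behavior concrete, a hedged sketch (the tuning parameters come from the patch's own test parametrization further below; the handler is hypothetical):

```python
from aiohttp import web

async def hello(request: web.Request) -> web.Response:
    return web.Response(text="Hello, world")

app = web.Application()
app.router.add_get("/", hello)

# Keyword arguments that run_app() does not consume itself are now
# forwarded to the AppRunner constructor, so server knobs can be set
# directly at the call site.
web.run_app(app, keepalive_timeout=75.0, read_bufsize=2**16, auto_decompress=True)
```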
diff --git a/tests/test_run_app.py b/tests/test_run_app.py index 206eafde6b0..6cb19b1def2 100644 --- a/tests/test_run_app.py +++ b/tests/test_run_app.py @@ -884,22 +884,29 @@ async def on_startup(app): exc_handler.assert_called_with(patched_loop, msg) -def test_run_app_keepalive_timeout(patched_loop, mocker, monkeypatch): - new_timeout = 1234 +@pytest.mark.parametrize( + "param", + ( + "keepalive_timeout", + "max_line_size", + "max_headers", + "max_field_size", + "lingering_time", + "read_bufsize", + "auto_decompress", + ), +) +def test_run_app_pass_apprunner_kwargs(param, patched_loop, monkeypatch): + m = mock.Mock() base_runner_init_orig = BaseRunner.__init__ def base_runner_init_spy(self, *args, **kwargs): - assert kwargs["keepalive_timeout"] == new_timeout + assert kwargs[param] is m base_runner_init_orig(self, *args, **kwargs) app = web.Application() monkeypatch.setattr(BaseRunner, "__init__", base_runner_init_spy) - web.run_app( - app, - keepalive_timeout=new_timeout, - print=stopper(patched_loop), - loop=patched_loop, - ) + web.run_app(app, print=stopper(patched_loop), loop=patched_loop, **{param: m}) def test_run_app_context_vars(patched_loop): From b39fd630dfc2fbdd46caf16debe08b60e767237a Mon Sep 17 00:00:00 2001 From: Cycloctane Date: Wed, 15 Oct 2025 06:43:39 +0800 Subject: [PATCH 30/51] [PR #11633/b1bd65d backport][3.13] Make AppRunner's configuration options available in run_app() (#11652) (cherry picked from commit b1bd65dbd191b318325c4b87d626bb5d7967cda7) --- CHANGES/11633.feature.rst | 2 ++ aiohttp/web.py | 21 ++++----------------- docs/web_reference.rst | 6 +++++- tests/test_run_app.py | 25 ++++++++++++++++--------- 4 files changed, 27 insertions(+), 27 deletions(-) create mode 100644 CHANGES/11633.feature.rst diff --git a/CHANGES/11633.feature.rst b/CHANGES/11633.feature.rst new file mode 100644 index 00000000000..374d15a2acd --- /dev/null +++ b/CHANGES/11633.feature.rst @@ -0,0 +1,2 @@ +Make configuration options in ``AppRunner`` also available in ``run_app()`` +-- by :user:`Cycloctane`. 
diff --git a/aiohttp/web.py b/aiohttp/web.py index 8307ff405ca..5a1fc964172 100644 --- a/aiohttp/web.py +++ b/aiohttp/web.py @@ -309,18 +309,12 @@ async def _run_app( port: Optional[int] = None, path: Union[PathLike, TypingIterable[PathLike], None] = None, sock: Optional[Union[socket.socket, TypingIterable[socket.socket]]] = None, - shutdown_timeout: float = 60.0, - keepalive_timeout: float = 75.0, ssl_context: Optional[SSLContext] = None, print: Optional[Callable[..., None]] = print, backlog: int = 128, - access_log_class: Type[AbstractAccessLogger] = AccessLogger, - access_log_format: str = AccessLogger.LOG_FORMAT, - access_log: Optional[logging.Logger] = access_logger, - handle_signals: bool = True, reuse_address: Optional[bool] = None, reuse_port: Optional[bool] = None, - handler_cancellation: bool = False, + **kwargs: Any, # TODO(PY311): Use Unpack ) -> None: # An internal function to actually do all dirty job for application running if asyncio.iscoroutine(app): @@ -328,16 +322,7 @@ async def _run_app( app = cast(Application, app) - runner = AppRunner( - app, - handle_signals=handle_signals, - access_log_class=access_log_class, - access_log_format=access_log_format, - access_log=access_log, - keepalive_timeout=keepalive_timeout, - shutdown_timeout=shutdown_timeout, - handler_cancellation=handler_cancellation, - ) + runner = AppRunner(app, **kwargs) await runner.setup() @@ -484,6 +469,7 @@ def run_app( reuse_port: Optional[bool] = None, handler_cancellation: bool = False, loop: Optional[asyncio.AbstractEventLoop] = None, + **kwargs: Any, ) -> None: """Run an app locally""" if loop is None: @@ -515,6 +501,7 @@ def run_app( reuse_address=reuse_address, reuse_port=reuse_port, handler_cancellation=handler_cancellation, + **kwargs, ) ) diff --git a/docs/web_reference.rst b/docs/web_reference.rst index 2d1882da17c..4400765b072 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -3060,7 +3060,8 @@ Utilities handle_signals=True, \ reuse_address=None, \ reuse_port=None, \ - handler_cancellation=False) + handler_cancellation=False, \ + **kwargs) A high-level function for running an application, serving it until keyboard interrupt and performing a @@ -3170,6 +3171,9 @@ Utilities scalability is a concern. :ref:`aiohttp-web-peer-disconnection` + :param kwargs: additional named parameters to pass into + :class:`AppRunner` constructor. + .. versionadded:: 3.0 Support *access_log_class* parameter. 
diff --git a/tests/test_run_app.py b/tests/test_run_app.py index eeb28d6ff41..c4c4d1784d9 100644 --- a/tests/test_run_app.py +++ b/tests/test_run_app.py @@ -893,22 +893,29 @@ async def on_startup(app): exc_handler.assert_called_with(patched_loop, msg) -def test_run_app_keepalive_timeout(patched_loop, mocker, monkeypatch): - new_timeout = 1234 +@pytest.mark.parametrize( + "param", + ( + "keepalive_timeout", + "max_line_size", + "max_headers", + "max_field_size", + "lingering_time", + "read_bufsize", + "auto_decompress", + ), +) +def test_run_app_pass_apprunner_kwargs(param, patched_loop, monkeypatch): + m = mock.Mock() base_runner_init_orig = BaseRunner.__init__ def base_runner_init_spy(self, *args, **kwargs): - assert kwargs["keepalive_timeout"] == new_timeout + assert kwargs[param] is m base_runner_init_orig(self, *args, **kwargs) app = web.Application() monkeypatch.setattr(BaseRunner, "__init__", base_runner_init_spy) - web.run_app( - app, - keepalive_timeout=new_timeout, - print=stopper(patched_loop), - loop=patched_loop, - ) + web.run_app(app, print=stopper(patched_loop), loop=patched_loop, **{param: m}) def test_run_app_context_vars(patched_loop): From 37da3f5ee609aeb0d94640b46f55dee1ffa6b071 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 14 Oct 2025 23:55:27 +0000 Subject: [PATCH 31/51] Bump platformdirs from 4.4.0 to 4.5.0 (#11622) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [platformdirs](https://github.com/tox-dev/platformdirs) from 4.4.0 to 4.5.0.
Release notes

Sourced from platformdirs's releases.

4.5.0

What's Changed

New Contributors

Full Changelog: https://github.com/tox-dev/platformdirs/compare/4.4.0...4.5.0

Commits

[![Dependabot compatibility score](https://dependabot-badges.githubapp.com/badges/compatibility_score?dependency-name=platformdirs&package-manager=pip&previous-version=4.4.0&new-version=4.5.0)](https://docs.github.com/en/github/managing-security-vulnerabilities/about-dependabot-security-updates#about-compatibility-scores) Dependabot will resolve any conflicts with this PR as long as you don't alter it yourself. You can also trigger a rebase manually by commenting `@dependabot rebase`. [//]: # (dependabot-automerge-start) [//]: # (dependabot-automerge-end) ---
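platformdirs arrives via virtualenv (see the `# via` lines below). A minimal sketch with a placeholder application name:

```python
from platformdirs import user_cache_dir, user_config_dir

# "demo-app" is a hypothetical app name used only for illustration.
print(user_cache_dir("demo-app"))
print(user_config_dir("demo-app"))
```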
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 76688a35a8d..e28cd9c6ce1 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -142,7 +142,7 @@ pip-tools==7.5.1 # via -r requirements/dev.in pkgconfig==1.5.5 # via -r requirements/test-common.in -platformdirs==4.4.0 +platformdirs==4.5.0 # via virtualenv pluggy==1.6.0 # via diff --git a/requirements/dev.txt b/requirements/dev.txt index 433be275383..ec0ddf4cb94 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -139,7 +139,7 @@ pip-tools==7.5.1 # via -r requirements/dev.in pkgconfig==1.5.5 # via -r requirements/test-common.in -platformdirs==4.4.0 +platformdirs==4.5.0 # via virtualenv pluggy==1.6.0 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index c65871b2b26..12737459b6c 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -57,7 +57,7 @@ packaging==25.0 # via pytest pathspec==0.12.1 # via mypy -platformdirs==4.4.0 +platformdirs==4.5.0 # via virtualenv pluggy==1.6.0 # via pytest From 325b680c28fe1adf154d6afa7b50e920793c3f22 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 15 Oct 2025 00:00:44 +0000 Subject: [PATCH 32/51] Bump markdown-it-py from 3.0.0 to 4.0.0 (#11595) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [markdown-it-py](https://github.com/executablebooks/markdown-it-py) from 3.0.0 to 4.0.0.
Release notes

Sourced from markdown-it-py's releases.

v4.0.0

What's Changed

This primarily drops support for Python 3.8 and 3.9, adds support for Python 3.13, and updates the parser to comply with Commonmark 0.31.2 and Markdown-It v14.1.0.

... (truncated)

Changelog

Sourced from markdown-it-py's changelog.

4.0.0 - 2024-08-10

This primarily drops support for Python 3.9, adds support for Python 3.13, and updates the parser to comply with Commonmark 0.31.2 and Markdown-It v14.1.0.

  • ⬆️ Drop support for Python 3.9 in #360
  • ⬆️ Comply with Commonmark 0.31.2 in #362
  • 👌 Improve performance of "text" inline rule in #347
  • 👌 Use str.removesuffix in #348
  • 👌 limit the number of autocompleted cells in a table in #364
  • 👌 fix quadratic complexity in reference parser in #367
  • 🐛 Fix emphasis inside raw links bugs in #320

Full Changelog: https://github.com/executablebooks/markdown-it-py/compare/v3.0.0...v4.0.0

Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/lint.txt | 2 +- requirements/test.txt | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index e28cd9c6ce1..431d707ac4a 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -111,7 +111,7 @@ jinja2==3.1.6 # via # sphinx # towncrier -markdown-it-py==3.0.0 +markdown-it-py==4.0.0 # via rich markupsafe==3.0.3 # via jinja2 diff --git a/requirements/dev.txt b/requirements/dev.txt index ec0ddf4cb94..e1b8083cba3 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -109,7 +109,7 @@ jinja2==3.1.6 # via # sphinx # towncrier -markdown-it-py==3.0.0 +markdown-it-py==4.0.0 # via rich markupsafe==3.0.3 # via jinja2 diff --git a/requirements/lint.txt b/requirements/lint.txt index 12737459b6c..4c36b35d208 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -43,7 +43,7 @@ iniconfig==2.1.0 # via pytest isal==1.7.2 # via -r requirements/lint.in -markdown-it-py==3.0.0 +markdown-it-py==4.0.0 # via rich mdurl==0.1.2 # via markdown-it-py diff --git a/requirements/test.txt b/requirements/test.txt index f3fc2f01683..2f7689f001f 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -57,7 +57,7 @@ iniconfig==2.1.0 # via pytest isal==1.7.2 ; python_version < "3.14" # via -r requirements/test-common.in -markdown-it-py==3.0.0 +markdown-it-py==4.0.0 # via rich mdurl==0.1.2 # via markdown-it-py From af1661e1d5d56a9629cf713cf6dbb0e60c573a78 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 15 Oct 2025 10:47:16 +0000 Subject: [PATCH 33/51] Bump pydantic from 2.12.1 to 2.12.2 (#11657) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.12.1 to 2.12.2.
Release notes

Sourced from pydantic's releases.

v2.12.2 2025-10-14

v2.12.2 (2025-10-14)

What's Changed

Fixes

  • Release a new pydantic-core version, as a corrupted CPython 3.10 manylinux2014_aarch64 wheel got uploaded (pydantic-core#1843).
  • Fix issue with recursive generic models with a parent model class by @Viicos in #12398

Full Changelog: https://github.com/pydantic/pydantic/compare/v2.12.1...v2.12.2

Changelog

Sourced from pydantic's changelog.

v2.12.2 (2025-10-14)

GitHub release

What's Changed

Fixes

  • Release a new pydantic-core version, as a corrupted CPython 3.10 manylinux2014_aarch64 wheel got uploaded (pydantic-core#1843).
  • Fix issue with recursive generic models with a parent model class by @Viicos in #12398
Commits
  • 1e616a3 Prepare release v2.12.2
  • dc302e2 Fix issue with recursive generic models with a parent model class
  • 6876485 Bump pydantic-core to v2.41.4
  • See full diff in compare view

Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 4 ++-- requirements/dev.txt | 4 ++-- requirements/lint.txt | 4 ++-- requirements/test-common.txt | 4 ++-- requirements/test-ft.txt | 4 ++-- requirements/test.txt | 4 ++-- 6 files changed, 12 insertions(+), 12 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 431d707ac4a..188972f614e 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -160,9 +160,9 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -pydantic==2.12.1 +pydantic==2.12.2 # via python-on-whales -pydantic-core==2.41.3 +pydantic-core==2.41.4 # via pydantic pyenchant==3.3.0 # via sphinxcontrib-spelling diff --git a/requirements/dev.txt b/requirements/dev.txt index e1b8083cba3..7778f6c9bb5 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -157,9 +157,9 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -pydantic==2.12.1 +pydantic==2.12.2 # via python-on-whales -pydantic-core==2.41.3 +pydantic-core==2.41.4 # via pydantic pygments==2.19.2 # via diff --git a/requirements/lint.txt b/requirements/lint.txt index 4c36b35d208..d2be0368f6c 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -67,9 +67,9 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -pydantic==2.12.1 +pydantic==2.12.2 # via python-on-whales -pydantic-core==2.41.3 +pydantic-core==2.41.4 # via pydantic pygments==2.19.2 # via diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 6189c205496..7843ec10c46 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -56,9 +56,9 @@ proxy-py==2.4.10 # via -r requirements/test-common.in pycparser==2.23 # via cffi -pydantic==2.12.1 +pydantic==2.12.2 # via python-on-whales -pydantic-core==2.41.3 +pydantic-core==2.41.4 # via pydantic pygments==2.19.2 # via diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 5097d946c86..a157a44efda 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -91,9 +91,9 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -pydantic==2.12.1 +pydantic==2.12.2 # via python-on-whales -pydantic-core==2.41.3 +pydantic-core==2.41.4 # via pydantic pygments==2.19.2 # via diff --git a/requirements/test.txt b/requirements/test.txt index 2f7689f001f..60c2b77e8bb 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -91,9 +91,9 @@ pycares==4.11.0 # via aiodns pycparser==2.23 # via cffi -pydantic==2.12.1 +pydantic==2.12.2 # via python-on-whales -pydantic-core==2.41.3 +pydantic-core==2.41.4 # via pydantic pygments==2.19.2 # via From 1e24afc9556736a36c0301e4ba1be33be9fdf242 Mon Sep 17 00:00:00 2001 From: Samuel Gaist Date: Wed, 15 Oct 2025 12:54:32 +0000 Subject: [PATCH 34/51] [PR #11580/d261f8a backport][3.14] Ensure that application/octet-stream is the default content_type (#11655) (cherry picked from commit d261f8a3ceaeb9699c13d6f62be9ea875f6f40bc) --- CHANGES/10889.bugfix.rst | 4 ++++ CONTRIBUTORS.txt | 1 + aiohttp/helpers.py | 32 ++++++++++++++++++++++++++++++-- docs/client_reference.rst | 12 +++++------- tests/test_helpers.py | 27 ++++++++++++++++++++++++++- tests/test_web_response.py | 4 ++-- 6 files changed, 68 insertions(+), 12 deletions(-) create mode 100644 CHANGES/10889.bugfix.rst diff --git a/CHANGES/10889.bugfix.rst b/CHANGES/10889.bugfix.rst new file mode 100644 index 00000000000..4bba5595b9e --- /dev/null +++ b/CHANGES/10889.bugfix.rst @@ -0,0 
+1,4 @@ +Updated ``Content-Type`` header parsing to return ``application/octet-stream`` when header contains invalid syntax. +See :rfc:`9110#section-8.3-5`. + +-- by :user:`sgaist`. diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 15a681ee6af..290be0205f1 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -311,6 +311,7 @@ Roman Postnov Rong Zhang Samir Akarioh Samuel Colvin +Samuel Gaist Sean Hunt Sebastian Acuna Sebastian Hanula diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 8d008d1b224..aac0bbc58e7 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -18,7 +18,9 @@ from collections import namedtuple from collections.abc import Callable, Generator, Iterable, Iterator, Mapping from contextlib import suppress +from email.message import EmailMessage from email.parser import HeaderParser +from email.policy import HTTP from email.utils import parsedate from math import ceil from pathlib import Path @@ -347,14 +349,40 @@ def parse_mimetype(mimetype: str) -> MimeType: ) +class EnsureOctetStream(EmailMessage): + def __init__(self) -> None: + super().__init__() + # https://www.rfc-editor.org/rfc/rfc9110#section-8.3-5 + self.set_default_type("application/octet-stream") + + def get_content_type(self) -> Any: + """Re-implementation from Message + + Returns application/octet-stream in place of plain/text when + value is wrong. + + The way this class is used guarantees that content-type will + be present so simplify the checks wrt to the base implementation. + """ + value = self.get("content-type", "").lower() + + # Based on the implementation of _splitparam in the standard library + ctype, _, _ = value.partition(";") + ctype = ctype.strip() + if ctype.count("/") != 1: + return self.get_default_type() + return ctype + + @functools.lru_cache(maxsize=56) def parse_content_type(raw: str) -> tuple[str, MappingProxyType[str, str]]: """Parse Content-Type header. Returns a tuple of the parsed content type and a - MappingProxyType of parameters. + MappingProxyType of parameters. The default returned value + is `application/octet-stream` """ - msg = HeaderParser().parsestr(f"Content-Type: {raw}") + msg = HeaderParser(EnsureOctetStream, policy=HTTP).parsestr(f"Content-Type: {raw}") content_type = msg.get_content_type() params = msg.get_params(()) content_dict = dict(params[1:]) # First element is content type again diff --git a/docs/client_reference.rst b/docs/client_reference.rst index ab16e35aed5..d8b36b95c91 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -1566,16 +1566,14 @@ Response object .. note:: - Returns value is ``'application/octet-stream'`` if no - Content-Type header present in HTTP headers according to - :rfc:`9110`. If the *Content-Type* header is invalid (e.g., ``jpg`` - instead of ``image/jpeg``), the value is ``text/plain`` by default - according to :rfc:`2045`. To see the original header check - ``resp.headers['CONTENT-TYPE']``. + Returns ``'application/octet-stream'`` if no Content-Type header + is present or the value contains invalid syntax according to + :rfc:`9110`. To see the original header check + ``resp.headers["Content-Type"]``. To make sure Content-Type header is not present in the server reply, use :attr:`headers` or :attr:`raw_headers`, e.g. - ``'CONTENT-TYPE' not in resp.headers``. + ``'Content-Type' not in resp.headers``. .. 
attribute:: charset diff --git a/tests/test_helpers.py b/tests/test_helpers.py index 5693d0b96b2..4baf6b8c0cd 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -6,11 +6,12 @@ import weakref from math import ceil, modf from pathlib import Path +from types import MappingProxyType from unittest import mock from urllib.request import getproxies_environment import pytest -from multidict import MultiDict +from multidict import MultiDict, MultiDictProxy from yarl import URL from aiohttp import helpers @@ -65,6 +66,30 @@ def test_parse_mimetype(mimetype, expected) -> None: assert result == expected +# ------------------- parse_content_type ------------------------------ + + +@pytest.mark.parametrize( + "content_type, expected", + [ + ( + "text/plain", + ("text/plain", MultiDictProxy(MultiDict())), + ), + ( + "wrong", + ("application/octet-stream", MultiDictProxy(MultiDict())), + ), + ], +) +def test_parse_content_type( + content_type: str, expected: tuple[str, MappingProxyType[str, str]] +) -> None: + result = helpers.parse_content_type(content_type) + + assert result == expected + + # ------------------- guess_filename ---------------------------------- diff --git a/tests/test_web_response.py b/tests/test_web_response.py index 609791b5b2b..70a83143641 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -1164,10 +1164,10 @@ def test_ctor_content_type_with_extra() -> None: assert resp.headers["content-type"] == "text/plain; version=0.0.4; charset=utf-8" -def test_invalid_content_type_parses_to_text_plain() -> None: +def test_invalid_content_type_parses_to_application_octect_stream() -> None: resp = Response(text="test test", content_type="jpeg") - assert resp.content_type == "text/plain" + assert resp.content_type == "application/octet-stream" assert resp.headers["content-type"] == "jpeg; charset=utf-8" From 3ec6360a15ae4f0603ed96b352ba488fb2f57e48 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 15 Oct 2025 14:31:01 +0100 Subject: [PATCH 35/51] [PR #11651/398329f6 backport][3.14] add xfail test for issue 11632 (#11659) **This is a backport of PR #11651 as merged into master (398329f6ec6237cb0f29149863485cccec65258a).** Co-authored-by: Alexander Mohr --- tests/test_cookie_helpers.py | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/tests/test_cookie_helpers.py b/tests/test_cookie_helpers.py index 0ec393e2b79..575bbe54d01 100644 --- a/tests/test_cookie_helpers.py +++ b/tests/test_cookie_helpers.py @@ -1137,6 +1137,33 @@ def test_parse_cookie_header_empty() -> None: assert parse_cookie_header(" ") == [] +@pytest.mark.xfail(reason="https://github.com/aio-libs/aiohttp/issues/11632") +def test_parse_cookie_gstate_header() -> None: + header = ( + "_ga=ga; " + "ajs_anonymous_id=0anonymous; " + "analytics_session_id=session; " + "cookies-analytics=true; " + "cookies-functional=true; " + "cookies-marketing=true; " + "cookies-preferences=true; " + 'g_state={"i_l":0,"i_ll":12345,"i_b":"blah"}; ' + "analytics_session_id.last_access=1760128947692; " + "landingPageURLRaw=landingPageURLRaw; " + "landingPageURL=landingPageURL; " + "referrerPageURLRaw=; " + "referrerPageURL=; " + "formURLRaw=formURLRaw; " + "formURL=formURL; " + "fbnAuthExpressCheckout=fbnAuthExpressCheckout; " + "is_express_checkout=1; " + ) + + result = parse_cookie_header(header) + assert result[7][0] == "g_state" + assert result[8][0] == "analytics_session_id.last_access" + + def 
test_parse_cookie_header_quoted_values() -> None: """Test parse_cookie_header handles quoted values correctly.""" header = 'name="quoted value"; session="with;semicolon"; data="with\\"escaped\\""' From 9cb35e227ebb529fc051ee59b9713782b7dee4c2 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 15 Oct 2025 14:49:24 +0100 Subject: [PATCH 36/51] [PR #11651/398329f6 backport][3.13] add xfail test for issue 11632 (#11658) **This is a backport of PR #11651 as merged into master (398329f6ec6237cb0f29149863485cccec65258a).** Co-authored-by: Alexander Mohr --- tests/test_cookie_helpers.py | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/tests/test_cookie_helpers.py b/tests/test_cookie_helpers.py index 0ec393e2b79..575bbe54d01 100644 --- a/tests/test_cookie_helpers.py +++ b/tests/test_cookie_helpers.py @@ -1137,6 +1137,33 @@ def test_parse_cookie_header_empty() -> None: assert parse_cookie_header(" ") == [] +@pytest.mark.xfail(reason="https://github.com/aio-libs/aiohttp/issues/11632") +def test_parse_cookie_gstate_header() -> None: + header = ( + "_ga=ga; " + "ajs_anonymous_id=0anonymous; " + "analytics_session_id=session; " + "cookies-analytics=true; " + "cookies-functional=true; " + "cookies-marketing=true; " + "cookies-preferences=true; " + 'g_state={"i_l":0,"i_ll":12345,"i_b":"blah"}; ' + "analytics_session_id.last_access=1760128947692; " + "landingPageURLRaw=landingPageURLRaw; " + "landingPageURL=landingPageURL; " + "referrerPageURLRaw=; " + "referrerPageURL=; " + "formURLRaw=formURLRaw; " + "formURL=formURL; " + "fbnAuthExpressCheckout=fbnAuthExpressCheckout; " + "is_express_checkout=1; " + ) + + result = parse_cookie_header(header) + assert result[7][0] == "g_state" + assert result[8][0] == "analytics_session_id.last_access" + + def test_parse_cookie_header_quoted_values() -> None: """Test parse_cookie_header handles quoted values correctly.""" header = 'name="quoted value"; session="with;semicolon"; data="with\\"escaped\\""' From 7e5a2d7b0c6d8bc2d60f3c2fd355793e2fabe4e5 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 15 Oct 2025 14:49:37 +0100 Subject: [PATCH 37/51] [PR #11655/1e24afc9 backport][3.13] [PR #11580/d261f8a backport][3.14] Ensure that application/octet-stream is the default content_type (#11660) **This is a backport of PR #11655 as merged into 3.14 (1e24afc9556736a36c0301e4ba1be33be9fdf242).** (cherry picked from commit d261f8a3ceaeb9699c13d6f62be9ea875f6f40bc) Co-authored-by: Samuel Gaist --- CHANGES/10889.bugfix.rst | 4 ++++ CONTRIBUTORS.txt | 1 + aiohttp/helpers.py | 32 ++++++++++++++++++++++++++++++-- docs/client_reference.rst | 12 +++++------- tests/test_helpers.py | 27 ++++++++++++++++++++++++++- tests/test_web_response.py | 4 ++-- 6 files changed, 68 insertions(+), 12 deletions(-) create mode 100644 CHANGES/10889.bugfix.rst diff --git a/CHANGES/10889.bugfix.rst b/CHANGES/10889.bugfix.rst new file mode 100644 index 00000000000..4bba5595b9e --- /dev/null +++ b/CHANGES/10889.bugfix.rst @@ -0,0 +1,4 @@ +Updated ``Content-Type`` header parsing to return ``application/octet-stream`` when header contains invalid syntax. +See :rfc:`9110#section-8.3-5`. + +-- by :user:`sgaist`. 
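As a quick illustration of the behaviour this pair of backports introduces, here is a hypothetical snippet using the private `aiohttp.helpers.parse_content_type` helper that the patch modifies; being internal API, its exact return types may vary between releases.

```python
# Sketch of the new Content-Type default; not part of the patch itself.
from aiohttp import helpers

# A well-formed header parses as before.
ctype, params = helpers.parse_content_type("text/plain; charset=utf-8")
assert ctype == "text/plain"
assert params["charset"] == "utf-8"

# A value with invalid syntax, such as a bare "jpeg", now resolves to
# the RFC 9110 default instead of the old text/plain fallback.
ctype, params = helpers.parse_content_type("jpeg")
assert ctype == "application/octet-stream"
```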
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt index 15a681ee6af..290be0205f1 100644 --- a/CONTRIBUTORS.txt +++ b/CONTRIBUTORS.txt @@ -311,6 +311,7 @@ Roman Postnov Rong Zhang Samir Akarioh Samuel Colvin +Samuel Gaist Sean Hunt Sebastian Acuna Sebastian Hanula diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index ace4f0e9b53..21316c18f1e 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -17,7 +17,9 @@ import weakref from collections import namedtuple from contextlib import suppress +from email.message import EmailMessage from email.parser import HeaderParser +from email.policy import HTTP from email.utils import parsedate from math import ceil from pathlib import Path @@ -357,14 +359,40 @@ def parse_mimetype(mimetype: str) -> MimeType: ) +class EnsureOctetStream(EmailMessage): + def __init__(self) -> None: + super().__init__() + # https://www.rfc-editor.org/rfc/rfc9110#section-8.3-5 + self.set_default_type("application/octet-stream") + + def get_content_type(self) -> Any: + """Re-implementation from Message + + Returns application/octet-stream in place of plain/text when + value is wrong. + + The way this class is used guarantees that content-type will + be present so simplify the checks wrt to the base implementation. + """ + value = self.get("content-type", "").lower() + + # Based on the implementation of _splitparam in the standard library + ctype, _, _ = value.partition(";") + ctype = ctype.strip() + if ctype.count("/") != 1: + return self.get_default_type() + return ctype + + @functools.lru_cache(maxsize=56) def parse_content_type(raw: str) -> Tuple[str, MappingProxyType[str, str]]: """Parse Content-Type header. Returns a tuple of the parsed content type and a - MappingProxyType of parameters. + MappingProxyType of parameters. The default returned value + is `application/octet-stream` """ - msg = HeaderParser().parsestr(f"Content-Type: {raw}") + msg = HeaderParser(EnsureOctetStream, policy=HTTP).parsestr(f"Content-Type: {raw}") content_type = msg.get_content_type() params = msg.get_params(()) content_dict = dict(params[1:]) # First element is content type again diff --git a/docs/client_reference.rst b/docs/client_reference.rst index ab16e35aed5..d8b36b95c91 100644 --- a/docs/client_reference.rst +++ b/docs/client_reference.rst @@ -1566,16 +1566,14 @@ Response object .. note:: - Returns value is ``'application/octet-stream'`` if no - Content-Type header present in HTTP headers according to - :rfc:`9110`. If the *Content-Type* header is invalid (e.g., ``jpg`` - instead of ``image/jpeg``), the value is ``text/plain`` by default - according to :rfc:`2045`. To see the original header check - ``resp.headers['CONTENT-TYPE']``. + Returns ``'application/octet-stream'`` if no Content-Type header + is present or the value contains invalid syntax according to + :rfc:`9110`. To see the original header check + ``resp.headers["Content-Type"]``. To make sure Content-Type header is not present in the server reply, use :attr:`headers` or :attr:`raw_headers`, e.g. - ``'CONTENT-TYPE' not in resp.headers``. + ``'Content-Type' not in resp.headers``. .. 
attribute:: charset diff --git a/tests/test_helpers.py b/tests/test_helpers.py index a343cbdfedf..f4f28710123 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -6,11 +6,12 @@ import weakref from math import ceil, modf from pathlib import Path +from types import MappingProxyType from unittest import mock from urllib.request import getproxies_environment import pytest -from multidict import MultiDict +from multidict import MultiDict, MultiDictProxy from yarl import URL from aiohttp import helpers @@ -65,6 +66,30 @@ def test_parse_mimetype(mimetype, expected) -> None: assert result == expected +# ------------------- parse_content_type ------------------------------ + + +@pytest.mark.parametrize( + "content_type, expected", + [ + ( + "text/plain", + ("text/plain", MultiDictProxy(MultiDict())), + ), + ( + "wrong", + ("application/octet-stream", MultiDictProxy(MultiDict())), + ), + ], +) +def test_parse_content_type( + content_type: str, expected: tuple[str, MappingProxyType[str, str]] +) -> None: + result = helpers.parse_content_type(content_type) + + assert result == expected + + # ------------------- guess_filename ---------------------------------- diff --git a/tests/test_web_response.py b/tests/test_web_response.py index c07bf671d8c..0525c1584f2 100644 --- a/tests/test_web_response.py +++ b/tests/test_web_response.py @@ -1164,10 +1164,10 @@ def test_ctor_content_type_with_extra() -> None: assert resp.headers["content-type"] == "text/plain; version=0.0.4; charset=utf-8" -def test_invalid_content_type_parses_to_text_plain() -> None: +def test_invalid_content_type_parses_to_application_octect_stream() -> None: resp = Response(text="test test", content_type="jpeg") - assert resp.content_type == "text/plain" + assert resp.content_type == "application/octet-stream" assert resp.headers["content-type"] == "jpeg; charset=utf-8" From e162e4b60224d0982980212b3b36b09be166e9a4 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 15 Oct 2025 15:01:38 +0100 Subject: [PATCH 38/51] [PR #11654/302243e3 backport][3.14] Remove cherry-picker from requirements (#11662) **This is a backport of PR #11654 as merged into master (302243e37274bd9e5a32e405ca11b1b0306f75b1).** Co-authored-by: Sam Bull --- requirements/constraints.txt | 14 -------------- requirements/dev.in | 1 - requirements/dev.txt | 14 -------------- 3 files changed, 29 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 188972f614e..0c66260a267 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -49,11 +49,8 @@ cfgv==3.4.0 # via pre-commit charset-normalizer==3.4.4 # via requests -cherry-picker==2.6.0 - # via -r requirements/dev.in click==8.3.0 # via - # cherry-picker # pip-tools # slotscheck # towncrier @@ -88,8 +85,6 @@ frozenlist==1.8.0 # via # -r requirements/runtime-deps.in # aiosignal -gidgethub==5.4.0 - # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in identify==2.6.15 @@ -173,7 +168,6 @@ pygments==2.19.2 # sphinx pyjwt==2.9.0 # via - # gidgethub # pyjwt pyproject-hooks==1.2.0 # via @@ -213,7 +207,6 @@ regex==2025.9.18 # via re-assert requests==2.32.5 # via - # cherry-picker # sphinx # sphinxcontrib-spelling rich==14.2.0 @@ -247,14 +240,9 @@ sphinxcontrib-spelling==8.0.1 ; platform_system != "Windows" # via -r requirements/doc-spelling.in sphinxcontrib-towncrier==0.5.0a0 # via -r requirements/doc.in -stamina==25.1.0 - # via cherry-picker -tenacity==9.1.2 - # via stamina tomli==2.3.0 # via 
# build - # cherry-picker # coverage # mypy # pip-tools @@ -284,8 +272,6 @@ typing-extensions==4.15.0 # virtualenv typing-inspection==0.4.2 # via pydantic -uritemplate==4.2.0 - # via gidgethub urllib3==2.5.0 # via requests uvloop==0.21.0 ; platform_system != "Windows" diff --git a/requirements/dev.in b/requirements/dev.in index 780eae45f5e..75185757c40 100644 --- a/requirements/dev.in +++ b/requirements/dev.in @@ -2,5 +2,4 @@ -r test.in -r doc.in -cherry_picker pip-tools diff --git a/requirements/dev.txt b/requirements/dev.txt index 7778f6c9bb5..1d6df601488 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -49,11 +49,8 @@ cfgv==3.4.0 # via pre-commit charset-normalizer==3.4.4 # via requests -cherry-picker==2.6.0 - # via -r requirements/dev.in click==8.3.0 # via - # cherry-picker # pip-tools # slotscheck # towncrier @@ -86,8 +83,6 @@ frozenlist==1.8.0 # via # -r requirements/runtime-deps.in # aiosignal -gidgethub==5.4.0 - # via cherry-picker gunicorn==23.0.0 # via -r requirements/base.in identify==2.6.15 @@ -168,7 +163,6 @@ pygments==2.19.2 # sphinx pyjwt==2.8.0 # via - # gidgethub # pyjwt pyproject-hooks==1.2.0 # via @@ -208,7 +202,6 @@ regex==2025.9.18 # via re-assert requests==2.32.5 # via - # cherry-picker # sphinx rich==14.2.0 # via pytest-codspeed @@ -238,14 +231,9 @@ sphinxcontrib-serializinghtml==2.0.0 # via sphinx sphinxcontrib-towncrier==0.5.0a0 # via -r requirements/doc.in -stamina==25.1.0 - # via cherry-picker -tenacity==9.1.2 - # via stamina tomli==2.3.0 # via # build - # cherry-picker # coverage # mypy # pip-tools @@ -275,8 +263,6 @@ typing-extensions==4.15.0 # virtualenv typing-inspection==0.4.2 # via pydantic -uritemplate==4.2.0 - # via gidgethub urllib3==2.5.0 # via requests uvloop==0.21.0 ; platform_system != "Windows" and implementation_name == "cpython" From dea08fd22a25e5cdb8d225b907e14f05133d96d0 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 15 Oct 2025 16:15:20 +0100 Subject: [PATCH 39/51] [PR #11661/149a8105 backport][3.14] Fix type annotation (#11664) **This is a backport of PR #11661 as merged into master (149a8105e7de73888df11ab1689315f9482aaeae).** Co-authored-by: Sam Bull --- aiohttp/helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index aac0bbc58e7..25d639028b0 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -355,7 +355,7 @@ def __init__(self) -> None: # https://www.rfc-editor.org/rfc/rfc9110#section-8.3-5 self.set_default_type("application/octet-stream") - def get_content_type(self) -> Any: + def get_content_type(self) -> str: """Re-implementation from Message Returns application/octet-stream in place of plain/text when From 7a8615865593794d114fad41854551329c978ecf Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Wed, 15 Oct 2025 16:33:09 +0100 Subject: [PATCH 40/51] [PR #11661/149a8105 backport][3.13] Fix type annotation (#11663) **This is a backport of PR #11661 as merged into master (149a8105e7de73888df11ab1689315f9482aaeae).** Co-authored-by: Sam Bull --- aiohttp/helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py index 21316c18f1e..dfab9877d39 100644 --- a/aiohttp/helpers.py +++ b/aiohttp/helpers.py @@ -365,7 +365,7 @@ def __init__(self) -> None: # https://www.rfc-editor.org/rfc/rfc9110#section-8.3-5 self.set_default_type("application/octet-stream") - def get_content_type(self) -> 
Any: + def get_content_type(self) -> str: """Re-implementation from Message Returns application/octet-stream in place of plain/text when From c5885b0aa472eec58f6f2ee9ded826d3c808db27 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 16 Oct 2025 10:53:50 +0000 Subject: [PATCH 41/51] Bump cryptography from 46.0.2 to 46.0.3 (#11669) Bumps [cryptography](https://github.com/pyca/cryptography) from 46.0.2 to 46.0.3.
Changelog

Sourced from cryptography's changelog.

46.0.3 - 2025-10-15


* Fixed compilation when using LibreSSL 4.2.0.

.. _v46-0-2:

Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 9 ++------- requirements/dev.txt | 12 +++--------- requirements/lint.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 6 files changed, 9 insertions(+), 20 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 0c66260a267..29c937ecfdf 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -59,10 +59,8 @@ coverage==7.10.7 # via # -r requirements/test-common.in # pytest-cov -cryptography==46.0.2 - # via - # pyjwt - # trustme +cryptography==46.0.3 + # via trustme cython==3.1.4 # via -r requirements/cython.in distlib==0.4.0 @@ -166,9 +164,6 @@ pygments==2.19.2 # pytest # rich # sphinx -pyjwt==2.9.0 - # via - # pyjwt pyproject-hooks==1.2.0 # via # build diff --git a/requirements/dev.txt b/requirements/dev.txt index 1d6df601488..d03a99b2cec 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -59,10 +59,8 @@ coverage==7.10.7 # via # -r requirements/test-common.in # pytest-cov -cryptography==46.0.2 - # via - # pyjwt - # trustme +cryptography==46.0.3 + # via trustme distlib==0.4.0 # via virtualenv docutils==0.21.2 @@ -161,9 +159,6 @@ pygments==2.19.2 # pytest # rich # sphinx -pyjwt==2.8.0 - # via - # pyjwt pyproject-hooks==1.2.0 # via # build @@ -201,8 +196,7 @@ re-assert==1.1.0 regex==2025.9.18 # via re-assert requests==2.32.5 - # via - # sphinx + # via sphinx rich==14.2.0 # via pytest-codspeed setuptools-git==1.2 diff --git a/requirements/lint.txt b/requirements/lint.txt index d2be0368f6c..be80aa83074 100644 --- a/requirements/lint.txt +++ b/requirements/lint.txt @@ -23,7 +23,7 @@ cfgv==3.4.0 # via pre-commit click==8.3.0 # via slotscheck -cryptography==46.0.2 +cryptography==46.0.3 # via trustme distlib==0.4.0 # via virtualenv diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 7843ec10c46..69ac6e51db8 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -18,7 +18,7 @@ coverage==7.10.7 # via # -r requirements/test-common.in # pytest-cov -cryptography==46.0.2 +cryptography==46.0.3 # via trustme exceptiongroup==1.3.0 # via pytest diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index a157a44efda..1f78682fe17 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -33,7 +33,7 @@ coverage==7.10.7 # via # -r requirements/test-common.in # pytest-cov -cryptography==46.0.2 +cryptography==46.0.3 # via trustme exceptiongroup==1.3.0 # via pytest diff --git a/requirements/test.txt b/requirements/test.txt index 60c2b77e8bb..2ab015bcfcc 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -33,7 +33,7 @@ coverage==7.10.7 # via # -r requirements/test-common.in # pytest-cov -cryptography==46.0.2 +cryptography==46.0.3 # via trustme exceptiongroup==1.3.0 # via pytest From c7e60c498c9b7d2528c6ff5ffc500af4ab55c064 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 16 Oct 2025 11:09:11 +0000 Subject: [PATCH 42/51] Bump coverage from 7.10.7 to 7.11.0 (#11670) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.10.7 to 7.11.0.
Changelog

Sourced from coverage's changelog.

Version 7.11.0 — 2025-10-15

  • Dropped support for Python 3.9, declared support for Python 3.15 alpha.

.. _changes_7-10-7:

Commits
  • 20ef00b docs: sample HTML for 7.11.0
  • 5edf8eb docs: prep for 7.11.0
  • 2c023ae build: 3.15 is supported
  • 2f1b95b refactor: no need for _BaseCoverageException
  • 72b1bcc build: test light-threads on all versions of Python
  • 16e9379 refactor: move core tests to their own file
  • bc8875d test: change a test to be in-process so metacov can capture its work
  • 8e5d5b1 build: tweak some version info
  • b0236df test: more tests for core selection, and some refactoring of them
  • 56edde6 build: next version will be 7.11.0
  • Additional commits viewable in compare view

Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements/constraints.txt | 2 +- requirements/dev.txt | 2 +- requirements/test-common.txt | 2 +- requirements/test-ft.txt | 2 +- requirements/test.txt | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/requirements/constraints.txt b/requirements/constraints.txt index 29c937ecfdf..c86f076606b 100644 --- a/requirements/constraints.txt +++ b/requirements/constraints.txt @@ -55,7 +55,7 @@ click==8.3.0 # slotscheck # towncrier # wait-for-it -coverage==7.10.7 +coverage==7.11.0 # via # -r requirements/test-common.in # pytest-cov diff --git a/requirements/dev.txt b/requirements/dev.txt index d03a99b2cec..24e94a7cc4e 100644 --- a/requirements/dev.txt +++ b/requirements/dev.txt @@ -55,7 +55,7 @@ click==8.3.0 # slotscheck # towncrier # wait-for-it -coverage==7.10.7 +coverage==7.11.0 # via # -r requirements/test-common.in # pytest-cov diff --git a/requirements/test-common.txt b/requirements/test-common.txt index 69ac6e51db8..0146b312858 100644 --- a/requirements/test-common.txt +++ b/requirements/test-common.txt @@ -14,7 +14,7 @@ cffi==2.0.0 # pytest-codspeed click==8.3.0 # via wait-for-it -coverage==7.10.7 +coverage==7.11.0 # via # -r requirements/test-common.in # pytest-cov diff --git a/requirements/test-ft.txt b/requirements/test-ft.txt index 1f78682fe17..8ab031bf707 100644 --- a/requirements/test-ft.txt +++ b/requirements/test-ft.txt @@ -29,7 +29,7 @@ cffi==2.0.0 # pytest-codspeed click==8.3.0 # via wait-for-it -coverage==7.10.7 +coverage==7.11.0 # via # -r requirements/test-common.in # pytest-cov diff --git a/requirements/test.txt b/requirements/test.txt index 2ab015bcfcc..86d0a85d965 100644 --- a/requirements/test.txt +++ b/requirements/test.txt @@ -29,7 +29,7 @@ cffi==2.0.0 # pytest-codspeed click==8.3.0 # via wait-for-it -coverage==7.10.7 +coverage==7.11.0 # via # -r requirements/test-common.in # pytest-cov From ec0f7aa0b064c93936538ab343e29fa13cd949a0 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 16 Oct 2025 16:11:36 +0100 Subject: [PATCH 43/51] [PR #11666/1590c860 backport][3.14] add xfail test for https://github.com/aio-libs/aiohttp/issues/11665 (#11672) **This is a backport of PR #11666 as merged into master (1590c86059f20d7955e523de139d35f88e87eb23).** Co-authored-by: Alastair <22815637+alastairvox@users.noreply.github.com> --- tests/test_web_urldispatcher.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 7084d821899..3f58f15568b 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -987,6 +987,28 @@ async def get(self) -> web.Response: await r.release() +@pytest.mark.xfail(reason="https://github.com/aio-libs/aiohttp/issues/11665") +async def test_subapp_domain_routing_same_path(aiohttp_client: AiohttpClient) -> None: + app = web.Application() + sub_app = web.Application() + + async def mainapp_handler(request: web.Request) -> web.Response: + assert False + + async def subapp_handler(request: web.Request) -> web.Response: + return web.Response(text="SUBAPP") + + app.router.add_get("/", mainapp_handler) + sub_app.router.add_get("/", subapp_handler) + app.add_domain("different.example.com", sub_app) + + client = await aiohttp_client(app) + async with client.get("/", headers={"Host": "different.example.com"}) as r: + assert r.status == 200 + result = await 
r.text() + assert result == "SUBAPP" + + async def test_route_with_regex(aiohttp_client: AiohttpClient) -> None: """Test a route with a regex preceded by a fixed string.""" app = web.Application() From 6ce4f1d93d0505083790661f569201eb5a7cdd41 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 16 Oct 2025 16:28:24 +0100 Subject: [PATCH 44/51] [PR #11666/1590c860 backport][3.13] add xfail test for https://github.com/aio-libs/aiohttp/issues/11665 (#11671) **This is a backport of PR #11666 as merged into master (1590c86059f20d7955e523de139d35f88e87eb23).** Co-authored-by: Alastair <22815637+alastairvox@users.noreply.github.com> --- tests/test_web_urldispatcher.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index ee60b6917c5..756b921713a 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -986,6 +986,28 @@ async def get(self) -> web.Response: await r.release() +@pytest.mark.xfail(reason="https://github.com/aio-libs/aiohttp/issues/11665") +async def test_subapp_domain_routing_same_path(aiohttp_client: AiohttpClient) -> None: + app = web.Application() + sub_app = web.Application() + + async def mainapp_handler(request: web.Request) -> web.Response: + assert False + + async def subapp_handler(request: web.Request) -> web.Response: + return web.Response(text="SUBAPP") + + app.router.add_get("/", mainapp_handler) + sub_app.router.add_get("/", subapp_handler) + app.add_domain("different.example.com", sub_app) + + client = await aiohttp_client(app) + async with client.get("/", headers={"Host": "different.example.com"}) as r: + assert r.status == 200 + result = await r.text() + assert result == "SUBAPP" + + async def test_route_with_regex(aiohttp_client: AiohttpClient) -> None: """Test a route with a regex preceded by a fixed string.""" app = web.Application() From 52afe62d0f7a0f364425ceece58696c170216950 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 16 Oct 2025 18:56:35 +0100 Subject: [PATCH 45/51] [PR #11674/c3685dac backport][3.14] docs: clarify that request() middlewares replace session middlewares (#11678) **This is a backport of PR #11674 as merged into master (c3685dac9726a18421a2fe59a716973e9878fb1b).** Co-authored-by: enioxt --- docs/client_middleware_cookbook.rst | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/docs/client_middleware_cookbook.rst b/docs/client_middleware_cookbook.rst index 33994160fba..e890b02a4bf 100644 --- a/docs/client_middleware_cookbook.rst +++ b/docs/client_middleware_cookbook.rst @@ -98,6 +98,29 @@ Using both of these together in a session should provide full SSRF protection. Best Practices -------------- +.. important:: + + **Request-level middlewares replace session middlewares**: When you pass ``middlewares`` + to ``request()`` or its convenience methods (``get()``, ``post()``, etc.), it completely + replaces the session-level middlewares, rather than extending them. This differs from + other parameters like ``headers``, which are merged. + + .. 
code-block:: python + + session = ClientSession(middlewares=[middleware_session]) + + # Session middleware is used + await session.get("http://example.com") + + # Session middleware is NOT used, only request middleware + await session.get("http://example.com", middlewares=[middleware_request]) + + # To use both, explicitly pass both + await session.get( + "http://example.com", + middlewares=[middleware_session, middleware_request] + ) + 1. **Keep middleware focused**: Each middleware should have a single responsibility. 2. **Order matters**: Middlewares execute in the order they're listed. Place logging first, From b8eff25bb9dae59c5c00ac0f757eb0b5a0d4d20f Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 16 Oct 2025 18:56:49 +0100 Subject: [PATCH 46/51] [PR #11674/c3685dac backport][3.13] docs: clarify that request() middlewares replace session middlewares (#11677) **This is a backport of PR #11674 as merged into master (c3685dac9726a18421a2fe59a716973e9878fb1b).** Co-authored-by: enioxt --- docs/client_middleware_cookbook.rst | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/docs/client_middleware_cookbook.rst b/docs/client_middleware_cookbook.rst index 33994160fba..e890b02a4bf 100644 --- a/docs/client_middleware_cookbook.rst +++ b/docs/client_middleware_cookbook.rst @@ -98,6 +98,29 @@ Using both of these together in a session should provide full SSRF protection. Best Practices -------------- +.. important:: + + **Request-level middlewares replace session middlewares**: When you pass ``middlewares`` + to ``request()`` or its convenience methods (``get()``, ``post()``, etc.), it completely + replaces the session-level middlewares, rather than extending them. This differs from + other parameters like ``headers``, which are merged. + + .. code-block:: python + + session = ClientSession(middlewares=[middleware_session]) + + # Session middleware is used + await session.get("http://example.com") + + # Session middleware is NOT used, only request middleware + await session.get("http://example.com", middlewares=[middleware_request]) + + # To use both, explicitly pass both + await session.get( + "http://example.com", + middlewares=[middleware_session, middleware_request] + ) + 1. **Keep middleware focused**: Each middleware should have a single responsibility. 2. **Order matters**: Middlewares execute in the order they're listed. Place logging first, From fb69cf0bc9f380e04b632fe92f76c4613902978d Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 16 Oct 2025 18:57:04 +0100 Subject: [PATCH 47/51] [PR #11673/e8c5252c backport][3.14] Fix domain matching being lower priority than path matching (#11676) **This is a backport of PR #11673 as merged into master (e8c5252cb28ce1db0232fc5fc66909791313c08b).** Co-authored-by: Sam Bull --- CHANGES/11673.bugfix.rst | 2 ++ aiohttp/web_urldispatcher.py | 30 +++++++++++++++--------------- docs/web_reference.rst | 8 ++++++++ tests/test_web_urldispatcher.py | 2 +- 4 files changed, 26 insertions(+), 16 deletions(-) create mode 100644 CHANGES/11673.bugfix.rst diff --git a/CHANGES/11673.bugfix.rst b/CHANGES/11673.bugfix.rst new file mode 100644 index 00000000000..accbe850847 --- /dev/null +++ b/CHANGES/11673.bugfix.rst @@ -0,0 +1,2 @@ +Fixed routing to a sub-application added via ``.add_domain()`` not working +if the same path exists on the parent app. -- by :user:`Dreamsorcerer`. 
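The failure mode this fixes can be sketched in a few lines; the host name and handlers below are illustrative, mirroring the regression test included in this patch.

```python
# Hypothetical app layout: the parent app and a domain-matched sub-app
# both define the same "/" path.
from aiohttp import web

async def main_handler(request: web.Request) -> web.Response:
    return web.Response(text="MAIN")

async def sub_handler(request: web.Request) -> web.Response:
    return web.Response(text="SUBAPP")

app = web.Application()
sub_app = web.Application()
app.router.add_get("/", main_handler)      # parent app defines "/"
sub_app.router.add_get("/", sub_handler)   # sub-app defines the same path
app.add_domain("different.example.com", sub_app)

# With this fix, a request carrying "Host: different.example.com" is
# routed to sub_handler; previously the parent app's "/" took priority.
web.run_app(app)
```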
diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 40d29dda794..e50b5212bb5 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -1006,6 +1006,21 @@ async def resolve(self, request: Request) -> UrlMappingMatchInfo: resource_index = self._resource_index allowed_methods: set[str] = set() + # MatchedSubAppResource is primarily used to match on domain names + # (though custom rules could match on other things). This means that + # the traversal algorithm below can't be applied, and that we likely + # need to check these first so a sub app that defines the same path + # as a parent app will get priority if there's a domain match. + # + # For most cases we do not expect there to be many of these since + # currently they are only added by `.add_domain()`. + for resource in self._matched_sub_app_resources: + match_dict, allowed = await resource.resolve(request) + if match_dict is not None: + return match_dict + else: + allowed_methods |= allowed + # Walk the url parts looking for candidates. We walk the url backwards # to ensure the most explicit match is found first. If there are multiple # candidates for a given url part because there are multiple resources @@ -1023,21 +1038,6 @@ async def resolve(self, request: Request) -> UrlMappingMatchInfo: break url_part = url_part.rpartition("/")[0] or "/" - # - # We didn't find any candidates, so we'll try the matched sub-app - # resources which we have to walk in a linear fashion because they - # have regex/wildcard match rules and we cannot index them. - # - # For most cases we do not expect there to be many of these since - # currently they are only added by `add_domain` - # - for resource in self._matched_sub_app_resources: - match_dict, allowed = await resource.resolve(request) - if match_dict is not None: - return match_dict - else: - allowed_methods |= allowed - if allowed_methods: return MatchInfoError(HTTPMethodNotAllowed(request.method, allowed_methods)) diff --git a/docs/web_reference.rst b/docs/web_reference.rst index 4400765b072..5ae15478b4f 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -1625,6 +1625,14 @@ Application and Router matches the pattern *domain* then further resolving is passed to *subapp*. + .. warning:: + + Registering many domains using this method may cause performance + issues with handler routing. If you have a substantial number of + applications for different domains, you may want to consider + using a reverse proxy (such as Nginx) to handle routing to + different apps, rather than registering them as sub-applications. + :param str domain: domain or mask of domain for the resource. :param Application subapp: nested application. 
diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 3f58f15568b..1daa3f496fd 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -987,8 +987,8 @@ async def get(self) -> web.Response: await r.release() -@pytest.mark.xfail(reason="https://github.com/aio-libs/aiohttp/issues/11665") async def test_subapp_domain_routing_same_path(aiohttp_client: AiohttpClient) -> None: + """Regression test for #11665.""" app = web.Application() sub_app = web.Application() From e618dcb5226bb38b997f9558f37700f2d5619452 Mon Sep 17 00:00:00 2001 From: "patchback[bot]" <45432694+patchback[bot]@users.noreply.github.com> Date: Thu, 16 Oct 2025 18:57:15 +0100 Subject: [PATCH 48/51] [PR #11673/e8c5252c backport][3.13] Fix domain matching being lower priority than path matching (#11675) **This is a backport of PR #11673 as merged into master (e8c5252cb28ce1db0232fc5fc66909791313c08b).** Co-authored-by: Sam Bull --- CHANGES/11673.bugfix.rst | 2 ++ aiohttp/web_urldispatcher.py | 30 +++++++++++++++--------------- docs/web_reference.rst | 8 ++++++++ tests/test_web_urldispatcher.py | 2 +- 4 files changed, 26 insertions(+), 16 deletions(-) create mode 100644 CHANGES/11673.bugfix.rst diff --git a/CHANGES/11673.bugfix.rst b/CHANGES/11673.bugfix.rst new file mode 100644 index 00000000000..accbe850847 --- /dev/null +++ b/CHANGES/11673.bugfix.rst @@ -0,0 +1,2 @@ +Fixed routing to a sub-application added via ``.add_domain()`` not working +if the same path exists on the parent app. -- by :user:`Dreamsorcerer`. diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py index 61766f0c5c6..8213456c5f5 100644 --- a/aiohttp/web_urldispatcher.py +++ b/aiohttp/web_urldispatcher.py @@ -1034,6 +1034,21 @@ async def resolve(self, request: Request) -> UrlMappingMatchInfo: resource_index = self._resource_index allowed_methods: Set[str] = set() + # MatchedSubAppResource is primarily used to match on domain names + # (though custom rules could match on other things). This means that + # the traversal algorithm below can't be applied, and that we likely + # need to check these first so a sub app that defines the same path + # as a parent app will get priority if there's a domain match. + # + # For most cases we do not expect there to be many of these since + # currently they are only added by `.add_domain()`. + for resource in self._matched_sub_app_resources: + match_dict, allowed = await resource.resolve(request) + if match_dict is not None: + return match_dict + else: + allowed_methods |= allowed + # Walk the url parts looking for candidates. We walk the url backwards # to ensure the most explicit match is found first. If there are multiple # candidates for a given url part because there are multiple resources @@ -1051,21 +1066,6 @@ async def resolve(self, request: Request) -> UrlMappingMatchInfo: break url_part = url_part.rpartition("/")[0] or "/" - # - # We didn't find any candidates, so we'll try the matched sub-app - # resources which we have to walk in a linear fashion because they - # have regex/wildcard match rules and we cannot index them. 
- # - # For most cases we do not expect there to be many of these since - # currently they are only added by `add_domain` - # - for resource in self._matched_sub_app_resources: - match_dict, allowed = await resource.resolve(request) - if match_dict is not None: - return match_dict - else: - allowed_methods |= allowed - if allowed_methods: return MatchInfoError(HTTPMethodNotAllowed(request.method, allowed_methods)) diff --git a/docs/web_reference.rst b/docs/web_reference.rst index 4400765b072..5ae15478b4f 100644 --- a/docs/web_reference.rst +++ b/docs/web_reference.rst @@ -1625,6 +1625,14 @@ Application and Router matches the pattern *domain* then further resolving is passed to *subapp*. + .. warning:: + + Registering many domains using this method may cause performance + issues with handler routing. If you have a substantial number of + applications for different domains, you may want to consider + using a reverse proxy (such as Nginx) to handle routing to + different apps, rather than registering them as sub-applications. + :param str domain: domain or mask of domain for the resource. :param Application subapp: nested application. diff --git a/tests/test_web_urldispatcher.py b/tests/test_web_urldispatcher.py index 756b921713a..11ec47c1730 100644 --- a/tests/test_web_urldispatcher.py +++ b/tests/test_web_urldispatcher.py @@ -986,8 +986,8 @@ async def get(self) -> web.Response: await r.release() -@pytest.mark.xfail(reason="https://github.com/aio-libs/aiohttp/issues/11665") async def test_subapp_domain_routing_same_path(aiohttp_client: AiohttpClient) -> None: + """Regression test for #11665.""" app = web.Application() sub_app = web.Application() From b2ddca1bca01c9cf69019241593a21f9b72ad138 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 16 Oct 2025 08:11:39 -1000 Subject: [PATCH 49/51] [PR #11634/cde03b9 backport][3.13] Fix blocking I/O to load netrc when creating requests (#11680) --- CHANGES/11634.bugfix.rst | 1 + aiohttp/client.py | 29 ++++++++ aiohttp/client_reqrep.py | 6 -- tests/conftest.py | 34 +++++++-- tests/test_client_functional.py | 126 +++++++++++++++++++++++--------- tests/test_client_request.py | 22 +----- tests/test_client_session.py | 81 +++++++++++++++++++- 7 files changed, 233 insertions(+), 66 deletions(-) create mode 100644 CHANGES/11634.bugfix.rst diff --git a/CHANGES/11634.bugfix.rst b/CHANGES/11634.bugfix.rst new file mode 100644 index 00000000000..649577c50b9 --- /dev/null +++ b/CHANGES/11634.bugfix.rst @@ -0,0 +1 @@ +Fixed blocking I/O in the event loop when using netrc authentication by moving netrc file lookup to an executor -- by :user:`bdraco`. diff --git a/aiohttp/client.py b/aiohttp/client.py index 0c72d5948ce..fb8287fc23c 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -98,7 +98,9 @@ EMPTY_BODY_METHODS, BasicAuth, TimeoutHandle, + basicauth_from_netrc, get_env_proxy_for_url, + netrc_from_env, sentinel, strip_auth_from_url, ) @@ -657,6 +659,20 @@ async def _request( ) ): auth = self._default_auth + + # Try netrc if auth is still None and trust_env is enabled. + # Only check if NETRC environment variable is set to avoid + # creating an expensive executor job unnecessarily. 
+ if ( + auth is None + and self._trust_env + and url.host is not None + and os.environ.get("NETRC") + ): + auth = await self._loop.run_in_executor( + None, self._get_netrc_auth, url.host + ) + # It would be confusing if we support explicit # Authorization header with auth argument if ( @@ -1211,6 +1227,19 @@ def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str] added_names.add(key) return result + def _get_netrc_auth(self, host: str) -> Optional[BasicAuth]: + """ + Get auth from netrc for the given host. + + This method is designed to be called in an executor to avoid + blocking I/O in the event loop. + """ + netrc_obj = netrc_from_env() + try: + return basicauth_from_netrc(netrc_obj, host) + except LookupError: + return None + if sys.version_info >= (3, 11) and TYPE_CHECKING: def get( diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 4f5bed9b9c9..a9e0795893d 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -53,8 +53,6 @@ BasicAuth, HeadersMixin, TimerNoop, - basicauth_from_netrc, - netrc_from_env, noop, reify, sentinel, @@ -1164,10 +1162,6 @@ def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> Non """Set basic auth.""" if auth is None: auth = self.auth - if auth is None and trust_env and self.url.host is not None: - netrc_obj = netrc_from_env() - with contextlib.suppress(LookupError): - auth = basicauth_from_netrc(netrc_obj, self.url.host) if auth is None: return diff --git a/tests/conftest.py b/tests/conftest.py index 2be64078659..62efff55f86 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -69,10 +69,6 @@ def blockbuster(request: pytest.FixtureRequest) -> Iterator[None]: with blockbuster_ctx( "aiohttp", excluded_modules=["aiohttp.pytest_plugin", "aiohttp.test_utils"] ) as bb: - # TODO: Fix blocking call in ClientRequest's constructor. 
- # https://github.com/aio-libs/aiohttp/issues/10435 - for func in ["io.TextIOWrapper.read", "os.stat"]: - bb.functions[func].can_block_in("aiohttp/client_reqrep.py", "update_auth") for func in [ "os.getcwd", "os.readlink", @@ -285,7 +281,35 @@ def netrc_contents( @pytest.fixture -def start_connection(): +def netrc_default_contents(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> Path: + """Create a temporary netrc file with default test credentials and set NETRC env var.""" + netrc_file = tmp_path / ".netrc" + netrc_file.write_text("default login netrc_user password netrc_pass\n") + + monkeypatch.setenv("NETRC", str(netrc_file)) + + return netrc_file + + +@pytest.fixture +def no_netrc(monkeypatch: pytest.MonkeyPatch) -> None: + """Ensure NETRC environment variable is not set.""" + monkeypatch.delenv("NETRC", raising=False) + + +@pytest.fixture +def netrc_other_host(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> Path: + """Create a temporary netrc file with credentials for a different host and set NETRC env var.""" + netrc_file = tmp_path / ".netrc" + netrc_file.write_text("machine other.example.com login user password pass\n") + + monkeypatch.setenv("NETRC", str(netrc_file)) + + return netrc_file + + +@pytest.fixture +def start_connection() -> Iterator[mock.Mock]: with mock.patch( "aiohttp.connector.aiohappyeyeballs.start_connection", autospec=True, diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index 6ebfd01c6ca..92ba1cf5204 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -77,8 +77,25 @@ def fname(here): return here / "conftest.py" -async def test_keepalive_two_requests_success(aiohttp_client) -> None: - async def handler(request): +@pytest.fixture +def headers_echo_client( + aiohttp_client: AiohttpClient, +) -> Callable[..., Awaitable[TestClient[web.Request, web.Application]]]: + """Create a client with an app that echoes request headers as JSON.""" + + async def factory(**kwargs: Any) -> TestClient[web.Request, web.Application]: + async def handler(request: web.Request) -> web.Response: + return web.json_response({"headers": dict(request.headers)}) + + app = web.Application() + app.router.add_get("/", handler) + return await aiohttp_client(app, **kwargs) + + return factory + + +async def test_keepalive_two_requests_success(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: body = await request.read() assert b"" == body return web.Response(body=b"OK") @@ -3712,14 +3729,12 @@ async def handler(request): assert not ctx._coro.cr_running -async def test_session_auth(aiohttp_client) -> None: - async def handler(request): - return web.json_response({"headers": dict(request.headers)}) - - app = web.Application() - app.router.add_get("/", handler) - - client = await aiohttp_client(app, auth=aiohttp.BasicAuth("login", "pass")) +async def test_session_auth( + headers_echo_client: Callable[ + ..., Awaitable[TestClient[web.Request, web.Application]] + ], +) -> None: + client = await headers_echo_client(auth=aiohttp.BasicAuth("login", "pass")) r = await client.get("/") assert r.status == 200 @@ -3727,14 +3742,12 @@ async def handler(request): assert content["headers"]["Authorization"] == "Basic bG9naW46cGFzcw==" -async def test_session_auth_override(aiohttp_client) -> None: - async def handler(request): - return web.json_response({"headers": dict(request.headers)}) - - app = web.Application() - app.router.add_get("/", handler) - - client = await aiohttp_client(app, 
auth=aiohttp.BasicAuth("login", "pass")) +async def test_session_auth_override( + headers_echo_client: Callable[ + ..., Awaitable[TestClient[web.Request, web.Application]] + ], +) -> None: + client = await headers_echo_client(auth=aiohttp.BasicAuth("login", "pass")) r = await client.get("/", auth=aiohttp.BasicAuth("other_login", "pass")) assert r.status == 200 @@ -3756,14 +3769,63 @@ async def handler(request): await client.get("/", headers=headers) -async def test_session_headers(aiohttp_client) -> None: - async def handler(request): - return web.json_response({"headers": dict(request.headers)}) +@pytest.mark.usefixtures("netrc_default_contents") +async def test_netrc_auth_from_env( # type: ignore[misc] + headers_echo_client: Callable[ + ..., Awaitable[TestClient[web.Request, web.Application]] + ], +) -> None: + """Test that netrc authentication works when NETRC env var is set and trust_env=True.""" + client = await headers_echo_client(trust_env=True) + async with client.get("/") as r: + assert r.status == 200 + content = await r.json() + # Base64 encoded "netrc_user:netrc_pass" is "bmV0cmNfdXNlcjpuZXRyY19wYXNz" + assert content["headers"]["Authorization"] == "Basic bmV0cmNfdXNlcjpuZXRyY19wYXNz" - app = web.Application() - app.router.add_get("/", handler) - client = await aiohttp_client(app, headers={"X-Real-IP": "192.168.0.1"}) +@pytest.mark.usefixtures("no_netrc") +async def test_netrc_auth_skipped_without_env_var( # type: ignore[misc] + headers_echo_client: Callable[ + ..., Awaitable[TestClient[web.Request, web.Application]] + ], +) -> None: + """Test that netrc authentication is skipped when NETRC env var is not set.""" + client = await headers_echo_client(trust_env=True) + async with client.get("/") as r: + assert r.status == 200 + content = await r.json() + # No Authorization header should be present + assert "Authorization" not in content["headers"] + + +@pytest.mark.usefixtures("netrc_default_contents") +async def test_netrc_auth_overridden_by_explicit_auth( # type: ignore[misc] + headers_echo_client: Callable[ + ..., Awaitable[TestClient[web.Request, web.Application]] + ], +) -> None: + """Test that explicit auth parameter overrides netrc authentication.""" + client = await headers_echo_client(trust_env=True) + # Make request with explicit auth (should override netrc) + async with client.get( + "/", auth=aiohttp.BasicAuth("explicit_user", "explicit_pass") + ) as r: + assert r.status == 200 + content = await r.json() + # Base64 encoded "explicit_user:explicit_pass" is "ZXhwbGljaXRfdXNlcjpleHBsaWNpdF9wYXNz" + assert ( + content["headers"]["Authorization"] + == "Basic ZXhwbGljaXRfdXNlcjpleHBsaWNpdF9wYXNz" + ) + + +async def test_session_headers( + headers_echo_client: Callable[ + ..., Awaitable[TestClient[web.Request, web.Application]] + ], +) -> None: + client = await headers_echo_client(headers={"X-Real-IP": "192.168.0.1"}) r = await client.get("/") assert r.status == 200 @@ -3771,15 +3833,13 @@ async def handler(request): assert content["headers"]["X-Real-IP"] == "192.168.0.1" -async def test_session_headers_merge(aiohttp_client) -> None: - async def handler(request): - return web.json_response({"headers": dict(request.headers)}) - - app = web.Application() - app.router.add_get("/", handler) - - client = await aiohttp_client( - app, headers=[("X-Real-IP", "192.168.0.1"), ("X-Sent-By", "requests")] +async def test_session_headers_merge( + headers_echo_client: Callable[ + ..., Awaitable[TestClient[web.Request, web.Application]] + ], +) -> None: + client = await headers_echo_client( 
+ headers=[("X-Real-IP", "192.168.0.1"), ("X-Sent-By", "requests")] ) r = await client.get("/", headers={"X-Sent-By": "aiohttp"}) diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 950dd93aeb6..db25f6ff910 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -15,7 +15,7 @@ from yarl import URL import aiohttp -from aiohttp import BaseConnector, hdrs, helpers, payload +from aiohttp import BaseConnector, hdrs, payload from aiohttp.abc import AbstractStreamWriter from aiohttp.client_exceptions import ClientConnectionError from aiohttp.client_reqrep import ( @@ -1545,26 +1545,6 @@ def test_gen_default_accept_encoding( assert _gen_default_accept_encoding() == expected -@pytest.mark.parametrize( - ("netrc_contents", "expected_auth"), - [ - ( - "machine example.com login username password pass\n", - helpers.BasicAuth("username", "pass"), - ) - ], - indirect=("netrc_contents",), -) -@pytest.mark.usefixtures("netrc_contents") -def test_basicauth_from_netrc_present( - make_request: Any, - expected_auth: Optional[helpers.BasicAuth], -): - """Test appropriate Authorization header is sent when netrc is not empty.""" - req = make_request("get", "http://example.com", trust_env=True) - assert req.headers[hdrs.AUTHORIZATION] == expected_auth.encode() - - @pytest.mark.parametrize( "netrc_contents", ("machine example.com login username password pass\n",), diff --git a/tests/test_client_session.py b/tests/test_client_session.py index c296c9670b0..8b148e742c5 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -25,6 +25,7 @@ from aiohttp.helpers import DEBUG from aiohttp.http import RawResponseMessage from aiohttp.pytest_plugin import AiohttpServer +from aiohttp.test_utils import TestServer from aiohttp.tracing import Trace @@ -75,7 +76,24 @@ def params(): ) -async def test_close_coro(create_session) -> None: +@pytest.fixture +async def auth_server(aiohttp_server: AiohttpServer) -> TestServer: + """Create a server with an auth handler that returns auth header or 'no_auth'.""" + + async def handler(request: web.Request) -> web.Response: + auth_header = request.headers.get(hdrs.AUTHORIZATION) + if auth_header: + return web.Response(text=f"auth:{auth_header}") + return web.Response(text="no_auth") + + app = web.Application() + app.router.add_get("/", handler) + return await aiohttp_server(app) + + +async def test_close_coro( + create_session: Callable[..., Awaitable[ClientSession]], +) -> None: session = await create_session() await session.close() @@ -1321,3 +1339,64 @@ async def test_properties( value = uuid4() setattr(session, inner_name, value) assert value == getattr(session, outer_name) + + +@pytest.mark.usefixtures("netrc_default_contents") +async def test_netrc_auth_with_trust_env(auth_server: TestServer) -> None: + """Test that netrc authentication works with ClientSession when NETRC env var is set.""" + async with ( + ClientSession(trust_env=True) as session, + session.get(auth_server.make_url("/")) as resp, + ): + text = await resp.text() + # Base64 encoded "netrc_user:netrc_pass" is "bmV0cmNfdXNlcjpuZXRyY19wYXNz" + assert text == "auth:Basic bmV0cmNfdXNlcjpuZXRyY19wYXNz" + + +@pytest.mark.usefixtures("netrc_default_contents") +async def test_netrc_auth_skipped_without_trust_env(auth_server: TestServer) -> None: + """Test that netrc authentication is skipped when trust_env=False.""" + async with ( + ClientSession(trust_env=False) as session, + session.get(auth_server.make_url("/")) as resp, + ): + text = await resp.text() 
+ assert text == "no_auth" + + +@pytest.mark.usefixtures("no_netrc") +async def test_netrc_auth_skipped_without_netrc_env(auth_server: TestServer) -> None: + """Test that netrc authentication is skipped when NETRC env var is not set.""" + async with ( + ClientSession(trust_env=True) as session, + session.get(auth_server.make_url("/")) as resp, + ): + text = await resp.text() + assert text == "no_auth" + + +@pytest.mark.usefixtures("netrc_default_contents") +async def test_netrc_auth_overridden_by_explicit_auth(auth_server: TestServer) -> None: + """Test that explicit auth parameter overrides netrc authentication.""" + async with ( + ClientSession(trust_env=True) as session, + session.get( + auth_server.make_url("/"), + auth=aiohttp.BasicAuth("explicit_user", "explicit_pass"), + ) as resp, + ): + text = await resp.text() + # Base64 encoded "explicit_user:explicit_pass" is "ZXhwbGljaXRfdXNlcjpleHBsaWNpdF9wYXNz" + assert text == "auth:Basic ZXhwbGljaXRfdXNlcjpleHBsaWNpdF9wYXNz" + + +@pytest.mark.usefixtures("netrc_other_host") +async def test_netrc_auth_host_not_in_netrc(auth_server: TestServer) -> None: + """Test that netrc lookup returns None when host is not in netrc file.""" + async with ( + ClientSession(trust_env=True) as session, + session.get(auth_server.make_url("/")) as resp, + ): + text = await resp.text() + # Should not have auth since the host is not in netrc + assert text == "no_auth" From 46ef156b015a6a329174289296f8a3b9e6f28d88 Mon Sep 17 00:00:00 2001 From: "J. Nick Koston" Date: Thu, 16 Oct 2025 08:11:47 -1000 Subject: [PATCH 50/51] [PR #11634/cde03b9 backport][3.14] Fix blocking I/O to load netrc when creating requests (#11679) --- CHANGES/11634.bugfix.rst | 1 + aiohttp/client.py | 29 ++++++++ aiohttp/client_reqrep.py | 6 -- tests/conftest.py | 34 +++++++-- tests/test_client_functional.py | 126 +++++++++++++++++++++++--------- tests/test_client_request.py | 22 +----- tests/test_client_session.py | 81 +++++++++++++++++++- 7 files changed, 233 insertions(+), 66 deletions(-) create mode 100644 CHANGES/11634.bugfix.rst diff --git a/CHANGES/11634.bugfix.rst b/CHANGES/11634.bugfix.rst new file mode 100644 index 00000000000..649577c50b9 --- /dev/null +++ b/CHANGES/11634.bugfix.rst @@ -0,0 +1 @@ +Fixed blocking I/O in the event loop when using netrc authentication by moving netrc file lookup to an executor -- by :user:`bdraco`. diff --git a/aiohttp/client.py b/aiohttp/client.py index f627aa5d927..b99f834d0bc 100644 --- a/aiohttp/client.py +++ b/aiohttp/client.py @@ -86,7 +86,9 @@ EMPTY_BODY_METHODS, BasicAuth, TimeoutHandle, + basicauth_from_netrc, get_env_proxy_for_url, + netrc_from_env, sentinel, strip_auth_from_url, ) @@ -641,6 +643,20 @@ async def _request( ) ): auth = self._default_auth + + # Try netrc if auth is still None and trust_env is enabled. + # Only check if NETRC environment variable is set to avoid + # creating an expensive executor job unnecessarily. + if ( + auth is None + and self._trust_env + and url.host is not None + and os.environ.get("NETRC") + ): + auth = await self._loop.run_in_executor( + None, self._get_netrc_auth, url.host + ) + # It would be confusing if we support explicit # Authorization header with auth argument if ( @@ -1195,6 +1211,19 @@ def _prepare_headers(self, headers: LooseHeaders | None) -> "CIMultiDict[str]": added_names.add(key) return result + def _get_netrc_auth(self, host: str) -> BasicAuth | None: + """ + Get auth from netrc for the given host. 
+ + This method is designed to be called in an executor to avoid + blocking I/O in the event loop. + """ + netrc_obj = netrc_from_env() + try: + return basicauth_from_netrc(netrc_obj, host) + except LookupError: + return None + if sys.version_info >= (3, 11) and TYPE_CHECKING: def get( diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py index 43106d217e7..d8dd4971e4e 100644 --- a/aiohttp/client_reqrep.py +++ b/aiohttp/client_reqrep.py @@ -40,8 +40,6 @@ BasicAuth, HeadersMixin, TimerNoop, - basicauth_from_netrc, - netrc_from_env, noop, reify, sentinel, @@ -1149,10 +1147,6 @@ def update_auth(self, auth: BasicAuth | None, trust_env: bool = False) -> None: """Set basic auth.""" if auth is None: auth = self.auth - if auth is None and trust_env and self.url.host is not None: - netrc_obj = netrc_from_env() - with contextlib.suppress(LookupError): - auth = basicauth_from_netrc(netrc_obj, self.url.host) if auth is None: return diff --git a/tests/conftest.py b/tests/conftest.py index 9ae9c19df11..bde9500f129 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -70,10 +70,6 @@ def blockbuster(request: pytest.FixtureRequest) -> Iterator[None]: with blockbuster_ctx( "aiohttp", excluded_modules=["aiohttp.pytest_plugin", "aiohttp.test_utils"] ) as bb: - # TODO: Fix blocking call in ClientRequest's constructor. - # https://github.com/aio-libs/aiohttp/issues/10435 - for func in ["io.TextIOWrapper.read", "os.stat"]: - bb.functions[func].can_block_in("aiohttp/client_reqrep.py", "update_auth") for func in [ "os.getcwd", "os.readlink", @@ -286,7 +282,35 @@ def netrc_contents( @pytest.fixture -def start_connection(): +def netrc_default_contents(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> Path: + """Create a temporary netrc file with default test credentials and set NETRC env var.""" + netrc_file = tmp_path / ".netrc" + netrc_file.write_text("default login netrc_user password netrc_pass\n") + + monkeypatch.setenv("NETRC", str(netrc_file)) + + return netrc_file + + +@pytest.fixture +def no_netrc(monkeypatch: pytest.MonkeyPatch) -> None: + """Ensure NETRC environment variable is not set.""" + monkeypatch.delenv("NETRC", raising=False) + + +@pytest.fixture +def netrc_other_host(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> Path: + """Create a temporary netrc file with credentials for a different host and set NETRC env var.""" + netrc_file = tmp_path / ".netrc" + netrc_file.write_text("machine other.example.com login user password pass\n") + + monkeypatch.setenv("NETRC", str(netrc_file)) + + return netrc_file + + +@pytest.fixture +def start_connection() -> Iterator[mock.Mock]: with mock.patch( "aiohttp.connector.aiohappyeyeballs.start_connection", autospec=True, diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py index fd63f1f59c3..7ff53719146 100644 --- a/tests/test_client_functional.py +++ b/tests/test_client_functional.py @@ -69,8 +69,25 @@ def fname(here): return here / "conftest.py" -async def test_keepalive_two_requests_success(aiohttp_client) -> None: - async def handler(request): +@pytest.fixture +def headers_echo_client( + aiohttp_client: AiohttpClient, +) -> Callable[..., Awaitable[TestClient[web.Request, web.Application]]]: + """Create a client with an app that echoes request headers as JSON.""" + + async def factory(**kwargs: Any) -> TestClient[web.Request, web.Application]: + async def handler(request: web.Request) -> web.Response: + return web.json_response({"headers": dict(request.headers)}) + + app = web.Application() + 
app.router.add_get("/", handler) + return await aiohttp_client(app, **kwargs) + + return factory + + +async def test_keepalive_two_requests_success(aiohttp_client: AiohttpClient) -> None: + async def handler(request: web.Request) -> web.Response: body = await request.read() assert b"" == body return web.Response(body=b"OK") @@ -3704,14 +3721,12 @@ async def handler(request): assert not ctx._coro.cr_running -async def test_session_auth(aiohttp_client) -> None: - async def handler(request): - return web.json_response({"headers": dict(request.headers)}) - - app = web.Application() - app.router.add_get("/", handler) - - client = await aiohttp_client(app, auth=aiohttp.BasicAuth("login", "pass")) +async def test_session_auth( + headers_echo_client: Callable[ + ..., Awaitable[TestClient[web.Request, web.Application]] + ], +) -> None: + client = await headers_echo_client(auth=aiohttp.BasicAuth("login", "pass")) r = await client.get("/") assert r.status == 200 @@ -3719,14 +3734,12 @@ async def handler(request): assert content["headers"]["Authorization"] == "Basic bG9naW46cGFzcw==" -async def test_session_auth_override(aiohttp_client) -> None: - async def handler(request): - return web.json_response({"headers": dict(request.headers)}) - - app = web.Application() - app.router.add_get("/", handler) - - client = await aiohttp_client(app, auth=aiohttp.BasicAuth("login", "pass")) +async def test_session_auth_override( + headers_echo_client: Callable[ + ..., Awaitable[TestClient[web.Request, web.Application]] + ], +) -> None: + client = await headers_echo_client(auth=aiohttp.BasicAuth("login", "pass")) r = await client.get("/", auth=aiohttp.BasicAuth("other_login", "pass")) assert r.status == 200 @@ -3748,14 +3761,63 @@ async def handler(request): await client.get("/", headers=headers) -async def test_session_headers(aiohttp_client) -> None: - async def handler(request): - return web.json_response({"headers": dict(request.headers)}) +@pytest.mark.usefixtures("netrc_default_contents") +async def test_netrc_auth_from_env( # type: ignore[misc] + headers_echo_client: Callable[ + ..., Awaitable[TestClient[web.Request, web.Application]] + ], +) -> None: + """Test that netrc authentication works when NETRC env var is set and trust_env=True.""" + client = await headers_echo_client(trust_env=True) + async with client.get("/") as r: + assert r.status == 200 + content = await r.json() + # Base64 encoded "netrc_user:netrc_pass" is "bmV0cmNfdXNlcjpuZXRyY19wYXNz" + assert content["headers"]["Authorization"] == "Basic bmV0cmNfdXNlcjpuZXRyY19wYXNz" - app = web.Application() - app.router.add_get("/", handler) - client = await aiohttp_client(app, headers={"X-Real-IP": "192.168.0.1"}) +@pytest.mark.usefixtures("no_netrc") +async def test_netrc_auth_skipped_without_env_var( # type: ignore[misc] + headers_echo_client: Callable[ + ..., Awaitable[TestClient[web.Request, web.Application]] + ], +) -> None: + """Test that netrc authentication is skipped when NETRC env var is not set.""" + client = await headers_echo_client(trust_env=True) + async with client.get("/") as r: + assert r.status == 200 + content = await r.json() + # No Authorization header should be present + assert "Authorization" not in content["headers"] + + +@pytest.mark.usefixtures("netrc_default_contents") +async def test_netrc_auth_overridden_by_explicit_auth( # type: ignore[misc] + headers_echo_client: Callable[ + ..., Awaitable[TestClient[web.Request, web.Application]] + ], +) -> None: + """Test that explicit auth parameter overrides netrc authentication.""" + 
client = await headers_echo_client(trust_env=True) + # Make request with explicit auth (should override netrc) + async with client.get( + "/", auth=aiohttp.BasicAuth("explicit_user", "explicit_pass") + ) as r: + assert r.status == 200 + content = await r.json() + # Base64 encoded "explicit_user:explicit_pass" is "ZXhwbGljaXRfdXNlcjpleHBsaWNpdF9wYXNz" + assert ( + content["headers"]["Authorization"] + == "Basic ZXhwbGljaXRfdXNlcjpleHBsaWNpdF9wYXNz" + ) + + +async def test_session_headers( + headers_echo_client: Callable[ + ..., Awaitable[TestClient[web.Request, web.Application]] + ], +) -> None: + client = await headers_echo_client(headers={"X-Real-IP": "192.168.0.1"}) r = await client.get("/") assert r.status == 200 @@ -3763,15 +3825,13 @@ async def handler(request): assert content["headers"]["X-Real-IP"] == "192.168.0.1" -async def test_session_headers_merge(aiohttp_client) -> None: - async def handler(request): - return web.json_response({"headers": dict(request.headers)}) - - app = web.Application() - app.router.add_get("/", handler) - - client = await aiohttp_client( - app, headers=[("X-Real-IP", "192.168.0.1"), ("X-Sent-By", "requests")] +async def test_session_headers_merge( + headers_echo_client: Callable[ + ..., Awaitable[TestClient[web.Request, web.Application]] + ], +) -> None: + client = await headers_echo_client( + headers=[("X-Real-IP", "192.168.0.1"), ("X-Sent-By", "requests")] ) r = await client.get("/", headers={"X-Sent-By": "aiohttp"}) diff --git a/tests/test_client_request.py b/tests/test_client_request.py index 56d35ac3433..08bd698b84c 100644 --- a/tests/test_client_request.py +++ b/tests/test_client_request.py @@ -15,7 +15,7 @@ from yarl import URL import aiohttp -from aiohttp import BaseConnector, hdrs, helpers, payload +from aiohttp import BaseConnector, hdrs, payload from aiohttp.abc import AbstractStreamWriter from aiohttp.client_exceptions import ClientConnectionError from aiohttp.client_reqrep import ( @@ -1545,26 +1545,6 @@ def test_gen_default_accept_encoding( assert _gen_default_accept_encoding() == expected -@pytest.mark.parametrize( - ("netrc_contents", "expected_auth"), - [ - ( - "machine example.com login username password pass\n", - helpers.BasicAuth("username", "pass"), - ) - ], - indirect=("netrc_contents",), -) -@pytest.mark.usefixtures("netrc_contents") -def test_basicauth_from_netrc_present( - make_request: Any, - expected_auth: helpers.BasicAuth | None, -): - """Test appropriate Authorization header is sent when netrc is not empty.""" - req = make_request("get", "http://example.com", trust_env=True) - assert req.headers[hdrs.AUTHORIZATION] == expected_auth.encode() - - @pytest.mark.parametrize( "netrc_contents", ("machine example.com login username password pass\n",), diff --git a/tests/test_client_session.py b/tests/test_client_session.py index d76e718ebb3..ade8a67b7ca 100644 --- a/tests/test_client_session.py +++ b/tests/test_client_session.py @@ -26,6 +26,7 @@ from aiohttp.helpers import DEBUG from aiohttp.http import RawResponseMessage from aiohttp.pytest_plugin import AiohttpServer +from aiohttp.test_utils import TestServer from aiohttp.tracing import Trace @@ -76,7 +77,24 @@ def params(): ) -async def test_close_coro(create_session) -> None: +@pytest.fixture +async def auth_server(aiohttp_server: AiohttpServer) -> TestServer: + """Create a server with an auth handler that returns auth header or 'no_auth'.""" + + async def handler(request: web.Request) -> web.Response: + auth_header = request.headers.get(hdrs.AUTHORIZATION) + if auth_header: + 
return web.Response(text=f"auth:{auth_header}") + return web.Response(text="no_auth") + + app = web.Application() + app.router.add_get("/", handler) + return await aiohttp_server(app) + + +async def test_close_coro( + create_session: Callable[..., Awaitable[ClientSession]], +) -> None: session = await create_session() await session.close() @@ -1322,3 +1340,64 @@ async def test_properties( value = uuid4() setattr(session, inner_name, value) assert value == getattr(session, outer_name) + + +@pytest.mark.usefixtures("netrc_default_contents") +async def test_netrc_auth_with_trust_env(auth_server: TestServer) -> None: + """Test that netrc authentication works with ClientSession when NETRC env var is set.""" + async with ( + ClientSession(trust_env=True) as session, + session.get(auth_server.make_url("/")) as resp, + ): + text = await resp.text() + # Base64 encoded "netrc_user:netrc_pass" is "bmV0cmNfdXNlcjpuZXRyY19wYXNz" + assert text == "auth:Basic bmV0cmNfdXNlcjpuZXRyY19wYXNz" + + +@pytest.mark.usefixtures("netrc_default_contents") +async def test_netrc_auth_skipped_without_trust_env(auth_server: TestServer) -> None: + """Test that netrc authentication is skipped when trust_env=False.""" + async with ( + ClientSession(trust_env=False) as session, + session.get(auth_server.make_url("/")) as resp, + ): + text = await resp.text() + assert text == "no_auth" + + +@pytest.mark.usefixtures("no_netrc") +async def test_netrc_auth_skipped_without_netrc_env(auth_server: TestServer) -> None: + """Test that netrc authentication is skipped when NETRC env var is not set.""" + async with ( + ClientSession(trust_env=True) as session, + session.get(auth_server.make_url("/")) as resp, + ): + text = await resp.text() + assert text == "no_auth" + + +@pytest.mark.usefixtures("netrc_default_contents") +async def test_netrc_auth_overridden_by_explicit_auth(auth_server: TestServer) -> None: + """Test that explicit auth parameter overrides netrc authentication.""" + async with ( + ClientSession(trust_env=True) as session, + session.get( + auth_server.make_url("/"), + auth=aiohttp.BasicAuth("explicit_user", "explicit_pass"), + ) as resp, + ): + text = await resp.text() + # Base64 encoded "explicit_user:explicit_pass" is "ZXhwbGljaXRfdXNlcjpleHBsaWNpdF9wYXNz" + assert text == "auth:Basic ZXhwbGljaXRfdXNlcjpleHBsaWNpdF9wYXNz" + + +@pytest.mark.usefixtures("netrc_other_host") +async def test_netrc_auth_host_not_in_netrc(auth_server: TestServer) -> None: + """Test that netrc lookup returns None when host is not in netrc file.""" + async with ( + ClientSession(trust_env=True) as session, + session.get(auth_server.make_url("/")) as resp, + ): + text = await resp.text() + # Should not have auth since the host is not in netrc + assert text == "no_auth" From ff12dadcfa2a105c0dd8cd21d48a76c1677b722b Mon Sep 17 00:00:00 2001 From: Sam Bull Date: Fri, 17 Oct 2025 13:50:39 +0100 Subject: [PATCH 51/51] Release v3.13.1 (#11684) --- CHANGES.rst | 82 ++++++++++++++++++++++++++++++++++++++ CHANGES/10889.bugfix.rst | 4 -- CHANGES/11603.bugfix.rst | 1 - CHANGES/11623.bugfix | 1 - CHANGES/11633.feature.rst | 2 - CHANGES/11634.bugfix.rst | 1 - CHANGES/11673.bugfix.rst | 2 - CHANGES/9951.packaging.rst | 2 - aiohttp/__init__.py | 2 +- 9 files changed, 83 insertions(+), 14 deletions(-) delete mode 100644 CHANGES/10889.bugfix.rst delete mode 100644 CHANGES/11603.bugfix.rst delete mode 100644 CHANGES/11623.bugfix delete mode 100644 CHANGES/11633.feature.rst delete mode 100644 CHANGES/11634.bugfix.rst delete mode 100644 CHANGES/11673.bugfix.rst 
delete mode 100644 CHANGES/9951.packaging.rst diff --git a/CHANGES.rst b/CHANGES.rst index 0bab04c4072..a14296b581a 100644 --- a/CHANGES.rst +++ b/CHANGES.rst @@ -10,6 +10,88 @@ .. towncrier release notes start +3.13.1 (2025-10-17) +=================== + +Features +-------- + +- Make configuration options in ``AppRunner`` also available in ``run_app()`` + -- by :user:`Cycloctane`. + + + *Related issues and pull requests on GitHub:* + :issue:`11633`. + + + +Bug fixes +--------- + +- Switched to `backports.zstd` for Python <3.14 and fixed zstd decompression for chunked zstd streams -- by :user:`ZhaoMJ`. + + Note: Users who installed ``zstandard`` for support on Python <3.14 will now need to install + ``backports.zstd`` instead (installing ``aiohttp[speedups]`` will do this automatically). + + + *Related issues and pull requests on GitHub:* + :issue:`11623`. + + + +- Updated ``Content-Type`` header parsing to return ``application/octet-stream`` when header contains invalid syntax. + See :rfc:`9110#section-8.3-5`. + + -- by :user:`sgaist`. + + + *Related issues and pull requests on GitHub:* + :issue:`10889`. + + + +- Fixed Python 3.14 support when built without ``zstd`` support -- by :user:`JacobHenner`. + + + *Related issues and pull requests on GitHub:* + :issue:`11603`. + + + +- Fixed blocking I/O in the event loop when using netrc authentication by moving netrc file lookup to an executor -- by :user:`bdraco`. + + + *Related issues and pull requests on GitHub:* + :issue:`11634`. + + + +- Fixed routing to a sub-application added via ``.add_domain()`` not working + if the same path exists on the parent app. -- by :user:`Dreamsorcerer`. + + + *Related issues and pull requests on GitHub:* + :issue:`11673`. + + + + +Packaging updates and notes for downstreams +------------------------------------------- + +- Moved core packaging metadata from :file:`setup.cfg` to :file:`pyproject.toml` per :pep:`621` + -- by :user:`cdce8p`. + + + *Related issues and pull requests on GitHub:* + :issue:`9951`. + + + + +---- + + 3.13.0 (2025-10-06) =================== diff --git a/CHANGES/10889.bugfix.rst b/CHANGES/10889.bugfix.rst deleted file mode 100644 index 4bba5595b9e..00000000000 --- a/CHANGES/10889.bugfix.rst +++ /dev/null @@ -1,4 +0,0 @@ -Updated ``Content-Type`` header parsing to return ``application/octet-stream`` when header contains invalid syntax. -See :rfc:`9110#section-8.3-5`. - --- by :user:`sgaist`. diff --git a/CHANGES/11603.bugfix.rst b/CHANGES/11603.bugfix.rst deleted file mode 100644 index 1475698f17a..00000000000 --- a/CHANGES/11603.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed Python 3.14 support when built without ``zstd`` support -- by :user:`JacobHenner`. diff --git a/CHANGES/11623.bugfix b/CHANGES/11623.bugfix deleted file mode 100644 index 447dd56388c..00000000000 --- a/CHANGES/11623.bugfix +++ /dev/null @@ -1 +0,0 @@ -Switched to `backports.zstd` for Python <3.14 and fixed zstd decompression for chunked zstd streams -- by :user:`ZhaoMJ`. diff --git a/CHANGES/11633.feature.rst b/CHANGES/11633.feature.rst deleted file mode 100644 index 374d15a2acd..00000000000 --- a/CHANGES/11633.feature.rst +++ /dev/null @@ -1,2 +0,0 @@ -Make configuration options in ``AppRunner`` also available in ``run_app()`` --- by :user:`Cycloctane`. 
diff --git a/CHANGES/11634.bugfix.rst b/CHANGES/11634.bugfix.rst deleted file mode 100644 index 649577c50b9..00000000000 --- a/CHANGES/11634.bugfix.rst +++ /dev/null @@ -1 +0,0 @@ -Fixed blocking I/O in the event loop when using netrc authentication by moving netrc file lookup to an executor -- by :user:`bdraco`. diff --git a/CHANGES/11673.bugfix.rst b/CHANGES/11673.bugfix.rst deleted file mode 100644 index accbe850847..00000000000 --- a/CHANGES/11673.bugfix.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed routing to a sub-application added via ``.add_domain()`` not working -if the same path exists on the parent app. -- by :user:`Dreamsorcerer`. diff --git a/CHANGES/9951.packaging.rst b/CHANGES/9951.packaging.rst deleted file mode 100644 index 5f567d23fac..00000000000 --- a/CHANGES/9951.packaging.rst +++ /dev/null @@ -1,2 +0,0 @@ -Moved core packaging metadata from :file:`setup.cfg` to :file:`pyproject.toml` per :pep:`621` --- by :user:`cdce8p`. diff --git a/aiohttp/__init__.py b/aiohttp/__init__.py index 443b05ff275..017c7a20525 100644 --- a/aiohttp/__init__.py +++ b/aiohttp/__init__.py @@ -1,4 +1,4 @@ -__version__ = "3.13.1.dev0" +__version__ = "3.13.1" from typing import TYPE_CHECKING, Tuple