diff --git a/.codecov.yml b/.codecov.yml
index aa8bed59986..f1d271533be 100644
--- a/.codecov.yml
+++ b/.codecov.yml
@@ -1,5 +1,7 @@
codecov:
branch: master
+ notify:
+ after_n_builds: 10
coverage:
range: "95..100"
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index b1222becc14..cd8b2782b43 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -26,6 +26,7 @@ updates:
target-branch: "3.8"
schedule:
interval: "daily"
+ open-pull-requests-limit: 10
# Maintain dependencies for Python aiohttp 3.8
- package-ecosystem: "pip"
diff --git a/.github/workflows/auto-merge.yml b/.github/workflows/auto-merge.yml
new file mode 100644
index 00000000000..5a9408a182c
--- /dev/null
+++ b/.github/workflows/auto-merge.yml
@@ -0,0 +1,22 @@
+name: Dependabot auto-merge
+on: pull_request_target
+
+permissions:
+ pull-requests: write
+ contents: write
+
+jobs:
+ dependabot:
+ runs-on: ubuntu-latest
+ if: ${{ github.actor == 'dependabot[bot]' }}
+ steps:
+ - name: Dependabot metadata
+ id: metadata
+ uses: dependabot/fetch-metadata@v1.1.1
+ with:
+ github-token: "${{ secrets.GITHUB_TOKEN }}"
+ - name: Enable auto-merge for Dependabot PRs
+ run: gh pr merge --auto --squash "$PR_URL"
+ env:
+ PR_URL: ${{github.event.pull_request.html_url}}
+ GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml
index 6c10ef1b3e4..f6fc9608971 100644
--- a/.github/workflows/backport.yml
+++ b/.github/workflows/backport.yml
@@ -21,12 +21,12 @@ jobs:
# if: ${{ github.event.pull_request.head.repo.full_name == 'aio-libs/aiohttp' }}
steps:
- id: generate_token
- uses: tibdex/github-app-token@v1.3
+ uses: tibdex/github-app-token@v1.4
with:
app_id: ${{ secrets.BOT_APP_ID }}
private_key: ${{ secrets.BOT_PRIVATE_KEY }}
- name: Backport
- uses: sqren/backport-github-action@v1.0.40
+ uses: sqren/backport-github-action@v1.0.41
with:
# Required
# Token to authenticate requests
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index b4bb10a08c9..0b92a258785 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -22,7 +22,7 @@ jobs:
timeout-minutes: 5
steps:
- name: Checkout
- uses: actions/checkout@v2.3.4
+ uses: actions/checkout@v2.3.5
with:
submodules: true
- name: Setup Python 3.8
@@ -76,7 +76,7 @@ jobs:
needs: lint
strategy:
matrix:
- pyver: [3.7, 3.8, 3.9]
+ pyver: [3.7, 3.8, 3.9, '3.10']
no-extensions: ['', 'Y']
os: [ubuntu, macos, windows]
exclude:
@@ -98,7 +98,7 @@ jobs:
timeout-minutes: 15
steps:
- name: Checkout
- uses: actions/checkout@v2.3.4
+ uses: actions/checkout@v2.3.5
with:
submodules: true
- name: Setup Python ${{ matrix.pyver }}
@@ -116,6 +116,8 @@ jobs:
path: ${{ steps.pip-cache.outputs.dir }}
restore-keys: |
pip-ci-${{ runner.os }}-${{ matrix.pyver }}-${{ matrix.no-extensions }}-
+ - name: Upgrade wheel # Needed for proxy.py install not to explode
+ run: pip install -U wheel
- name: Cythonize
if: ${{ matrix.no-extensions == '' }}
run: |
@@ -128,7 +130,7 @@ jobs:
make vvtest
python -m coverage xml
- name: Upload coverage
- uses: codecov/codecov-action@v1.5.2
+ uses: codecov/codecov-action@v2.1.0
with:
file: ./coverage.xml
flags: unit
@@ -151,7 +153,7 @@ jobs:
needs: pre-deploy
steps:
- name: Checkout
- uses: actions/checkout@v2.3.4
+ uses: actions/checkout@v2.3.5
with:
submodules: true
- name: Setup Python 3.8
@@ -174,7 +176,7 @@ jobs:
name: Linux
strategy:
matrix:
- pyver: [cp37-cp37m, cp38-cp38, cp39-cp39]
+ pyver: [cp37-cp37m, cp38-cp38, cp39-cp39, cp310-cp310]
arch: [x86_64, aarch64, i686, ppc64le, s390x]
fail-fast: false
runs-on: ubuntu-latest
@@ -184,7 +186,7 @@ jobs:
needs: pre-deploy
steps:
- name: Checkout
- uses: actions/checkout@v2.3.4
+ uses: actions/checkout@v2.3.5
with:
submodules: true
- name: Set up QEMU
@@ -222,7 +224,7 @@ jobs:
name: Binary wheels
strategy:
matrix:
- pyver: [3.7, 3.8, 3.9]
+ pyver: [3.7, 3.8, 3.9, '3.10']
os: [macos, windows]
arch: [x86, x64]
exclude:
@@ -233,7 +235,7 @@ jobs:
needs: pre-deploy
steps:
- name: Checkout
- uses: actions/checkout@v2.3.4
+ uses: actions/checkout@v2.3.5
with:
submodules: true
- name: Setup Python 3.8
diff --git a/.github/workflows/update-pre-commit.yml b/.github/workflows/update-pre-commit.yml
index d382c36a466..44c784e6aa5 100644
--- a/.github/workflows/update-pre-commit.yml
+++ b/.github/workflows/update-pre-commit.yml
@@ -7,7 +7,7 @@ jobs:
if: github.repository_owner == 'aiohttp'
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v2.3.4
+ - uses: actions/checkout@v2.3.5
- name: Set up Python
uses: actions/setup-python@v2
with:
@@ -19,12 +19,12 @@ jobs:
- name: Run pre-commit autoupdate
run: pre-commit autoupdate
- id: generate_token
- uses: tibdex/github-app-token@v1.3
+ uses: tibdex/github-app-token@v1.4
with:
app_id: ${{ secrets.BOT_APP_ID }}
private_key: ${{ secrets.BOT_PRIVATE_KEY }}
- name: Create Pull Request
- uses: peter-evans/create-pull-request@v3.10.0
+ uses: peter-evans/create-pull-request@v3.10.1
with:
token: ${{ steps.generate_token.outputs.token }}
branch: update/pre-commit-autoupdate
diff --git a/.mypy.ini b/.mypy.ini
index b5cedf90b7c..ebcd461441c 100644
--- a/.mypy.ini
+++ b/.mypy.ini
@@ -1,6 +1,7 @@
[mypy]
files = aiohttp, examples, tests
check_untyped_defs = True
+exclude = examples/legacy/
follow_imports_for_stubs = True
#disallow_any_decorated = True
disallow_any_generics = True
@@ -40,3 +41,6 @@ ignore_missing_imports = True
[mypy-uvloop]
ignore_missing_imports = True
+
+[mypy-python_on_whales]
+ignore_missing_imports = True
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index ba83762c34f..1cf75a7fb3c 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -7,24 +7,24 @@ repos:
entry: ./tools/check_changes.py
pass_filenames: false
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: 'v3.3.0'
+ rev: 'v4.0.1'
hooks:
- id: check-merge-conflict
- repo: https://github.com/asottile/yesqa
- rev: v1.2.2
+ rev: v1.2.3
hooks:
- id: yesqa
-- repo: https://github.com/pre-commit/mirrors-isort
- rev: 'v5.6.4'
+- repo: https://github.com/PyCQA/isort
+ rev: '5.9.3'
hooks:
- id: isort
- repo: https://github.com/psf/black
- rev: '20.8b1'
+ rev: '21.9b0'
hooks:
- id: black
language_version: python3 # Should be a command that runs python3.6+
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: 'v3.3.0'
+ rev: 'v4.0.1'
hooks:
- id: end-of-file-fixer
exclude: >-
@@ -60,18 +60,18 @@ repos:
- id: detect-private-key
exclude: ^examples/
- repo: https://github.com/asottile/pyupgrade
- rev: 'v2.7.4'
+ rev: 'v2.29.0'
hooks:
- id: pyupgrade
args: ['--py36-plus']
-- repo: https://gitlab.com/pycqa/flake8
- rev: '3.8.4'
+- repo: https://github.com/PyCQA/flake8
+ rev: '4.0.1'
hooks:
- id: flake8
exclude: "^docs/"
- repo: git://github.com/Lucas-C/pre-commit-hooks-markup
- rev: v1.0.0
+ rev: v1.0.1
hooks:
- id: rst-linter
files: >-
diff --git a/.readthedocs.yml b/.readthedocs.yml
index e2e8d918392..90fe80896bc 100644
--- a/.readthedocs.yml
+++ b/.readthedocs.yml
@@ -1,5 +1,21 @@
+# Read the Docs configuration file
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html
+# for details
+
+---
+version: 2
+
+submodules:
+ include: all # []
+ exclude: []
+ recursive: true
+
build:
image: latest
python:
- version: 3.6
- pip_install: false
+ version: 3.8
+ install:
+ - method: pip
+ path: .
+ - requirements: requirements/doc.txt
+...
diff --git a/CHANGES.rst b/CHANGES.rst
index f064f4895ce..6301a2a17a5 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,7 +1,3 @@
-=========
-Changelog
-=========
-
..
You should *NOT* be adding new change log entries to this file, this
file is managed by towncrier. You *may* edit previous change logs to
diff --git a/CHANGES/3450.bugfix b/CHANGES/3450.bugfix
new file mode 100644
index 00000000000..6b82b4c0481
--- /dev/null
+++ b/CHANGES/3450.bugfix
@@ -0,0 +1 @@
+Started using `MultiLoopChildWatcher` when it's available under POSIX while setting up the test I/O loop.
diff --git a/CHANGES/3559.doc b/CHANGES/3559.doc
index f15261dd5e0..aa49970bc18 100644
--- a/CHANGES/3559.doc
+++ b/CHANGES/3559.doc
@@ -1 +1 @@
-Clarified WebSocketResponse closure in quickstart example.
+Clarified ``WebSocketResponse`` closure in the quick start example.
diff --git a/CHANGES/3828.feature b/CHANGES/3828.feature
index 7d31a2bb243..9d78d813e95 100644
--- a/CHANGES/3828.feature
+++ b/CHANGES/3828.feature
@@ -1,4 +1,4 @@
-Disable implicit switch-back to pure python mode. The build fails loudly if aiohttp
-cannot be compiled with C Accellerators. Use AIOHTTP_NO_EXTENSIONS=1 to explicitly
+Disabled implicit switch-back to pure python mode. The build fails loudly if aiohttp
+cannot be compiled with C Accelerators. Use ``AIOHTTP_NO_EXTENSIONS=1`` to explicitly
disable C Extensions compilation and switch to Pure-Python mode. Note that Pure-Python
mode is significantly slower than compiled one.
diff --git a/CHANGES/4054.feature b/CHANGES/4054.feature
index 436bf352f6d..e34d741cba1 100644
--- a/CHANGES/4054.feature
+++ b/CHANGES/4054.feature
@@ -1 +1 @@
-Implemented readuntil in StreamResponse
+Implemented ``readuntil`` in ``StreamResponse``
diff --git a/CHANGES/4247.1.misc b/CHANGES/4247.1.misc
new file mode 100644
index 00000000000..86463d0577d
--- /dev/null
+++ b/CHANGES/4247.1.misc
@@ -0,0 +1 @@
+Automated running the autobahn test suite by integrating it with pytest.
diff --git a/CHANGES/4277.feature b/CHANGES/4277.feature
index bc5b360bf73..94677b1e855 100644
--- a/CHANGES/4277.feature
+++ b/CHANGES/4277.feature
@@ -1 +1 @@
-Add set_cookie and del_cookie methods to HTTPException
+Added ``set_cookie`` and ``del_cookie`` methods to ``HTTPException``
diff --git a/CHANGES/4299.bugfix b/CHANGES/4299.bugfix
index 8b82acbdacc..78ae9ec042a 100644
--- a/CHANGES/4299.bugfix
+++ b/CHANGES/4299.bugfix
@@ -1 +1 @@
-Delete older code in example (examples/web_classview.py)
+Delete older code in example (:file:`examples/web_classview.py`)
diff --git a/CHANGES/4302.bugfix b/CHANGES/4302.bugfix
index af14834f2fc..eb72de901dd 100644
--- a/CHANGES/4302.bugfix
+++ b/CHANGES/4302.bugfix
@@ -1 +1 @@
-Fixed the support of route handlers wrapped by functools.partial()
+Fixed the support of route handlers wrapped by :py:func:`functools.partial`
diff --git a/CHANGES/4452.doc b/CHANGES/4452.doc
index 306f751afc5..17681b2ee11 100644
--- a/CHANGES/4452.doc
+++ b/CHANGES/4452.doc
@@ -1 +1 @@
-Fix typo in client_quickstart docs.
+Fixed a typo in the ``client_quickstart`` doc.
diff --git a/CHANGES/4686.feature b/CHANGES/4686.feature
new file mode 100644
index 00000000000..1b74265fb94
--- /dev/null
+++ b/CHANGES/4686.feature
@@ -0,0 +1 @@
+Add a request handler type alias ``aiohttp.typedefs.Handler``.
diff --git a/CHANGES/4700.feature b/CHANGES/4700.feature
index dfcd88ff960..da691aa7c0e 100644
--- a/CHANGES/4700.feature
+++ b/CHANGES/4700.feature
@@ -1,6 +1,6 @@
-AioHTTPTestCase is more async friendly now.
+``AioHTTPTestCase`` is more async friendly now.
-For people who use unittest and are used to use unittest.TestCase
-it will be easier to write new test cases like the sync version of the TestCase class,
+For people who use unittest and are used to using :py:class:`~unittest.TestCase`
+it will be easier to write new test cases like the sync version of the :py:class:`~unittest.TestCase` class,
without using the decorator `@unittest_run_loop`, just `async def test_*`.
-The only difference is that for the people using python3.7 and below a new dependency is needed, it is `asynctestcase`.
+The only difference is that people using Python 3.7 and below need one extra dependency: ``asynctestcase``.
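
A hedged illustration of what this fragment describes: plain ``async def test_*`` methods on ``AioHTTPTestCase``, with no ``@unittest_run_loop`` decorator. The app and route below are invented for the example.

.. code-block:: python

    from aiohttp import web
    from aiohttp.test_utils import AioHTTPTestCase


    class HelloAppTestCase(AioHTTPTestCase):
        async def get_application(self) -> web.Application:
            async def hello(request: web.Request) -> web.Response:
                return web.Response(text="Hello, world")

            app = web.Application()
            app.router.add_get("/", hello)
            return app

        async def test_hello(self) -> None:  # no @unittest_run_loop needed
            async with self.client.get("/") as resp:
                assert resp.status == 200
                assert "Hello" in await resp.text()
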
diff --git a/CHANGES/4818.feature b/CHANGES/4818.feature
new file mode 100644
index 00000000000..158e4ebae84
--- /dev/null
+++ b/CHANGES/4818.feature
@@ -0,0 +1 @@
+Add validation of HTTP header keys and values to prevent header injection.
diff --git a/CHANGES/5326.doc b/CHANGES/5326.doc
index 74aff4c4225..5564425aff4 100644
--- a/CHANGES/5326.doc
+++ b/CHANGES/5326.doc
@@ -1 +1 @@
-Refactor OpenAPI/Swagger aiohttp addons, added aio-openapi
+Refactored OpenAPI/Swagger aiohttp addons, added ``aio-openapi``
diff --git a/CHANGES/5634.feature b/CHANGES/5634.feature
new file mode 100644
index 00000000000..1240147019e
--- /dev/null
+++ b/CHANGES/5634.feature
@@ -0,0 +1 @@
+Added a warning when a cookie's length exceeds the :rfc:`6265` minimum client support threshold -- :user:`anesabml`.
diff --git a/CHANGES/5727.bugfix b/CHANGES/5727.bugfix
new file mode 100644
index 00000000000..7f2e3a2a9d8
--- /dev/null
+++ b/CHANGES/5727.bugfix
@@ -0,0 +1 @@
+Removed an incorrect default from the docs.
diff --git a/CHANGES/5783.feature b/CHANGES/5783.feature
new file mode 100644
index 00000000000..6b5c534f66f
--- /dev/null
+++ b/CHANGES/5783.feature
@@ -0,0 +1 @@
+Started keeping the ``Authorization`` header during HTTP -> HTTPS redirects when the host remains the same.
diff --git a/CHANGES/5806.misc b/CHANGES/5806.misc
new file mode 100644
index 00000000000..e2f30f91b5f
--- /dev/null
+++ b/CHANGES/5806.misc
@@ -0,0 +1 @@
+Removed the last remnants of the attrs library.
diff --git a/CHANGES/5870.misc b/CHANGES/5870.misc
new file mode 100644
index 00000000000..e2cdd194380
--- /dev/null
+++ b/CHANGES/5870.misc
@@ -0,0 +1 @@
+Simplify generator expression.
diff --git a/CHANGES/5877.bugfix b/CHANGES/5877.bugfix
new file mode 100644
index 00000000000..5a8108a9a45
--- /dev/null
+++ b/CHANGES/5877.bugfix
@@ -0,0 +1 @@
+Uses :py:class:`~asyncio.ThreadedChildWatcher` under POSIX to allow setting up the test loop in a non-main thread.
diff --git a/CHANGES/5890.misc b/CHANGES/5890.misc
new file mode 100644
index 00000000000..489cfc336a7
--- /dev/null
+++ b/CHANGES/5890.misc
@@ -0,0 +1 @@
+Update to Mypy 0.910.
diff --git a/CHANGES/5905.bugfix b/CHANGES/5905.bugfix
new file mode 100644
index 00000000000..0e581b5cbf3
--- /dev/null
+++ b/CHANGES/5905.bugfix
@@ -0,0 +1 @@
+Removed the deprecated ``loop`` argument from the ``asyncio.sleep``/``gather`` calls
diff --git a/CHANGES/5914.misc b/CHANGES/5914.misc
new file mode 100644
index 00000000000..f876dc3ca92
--- /dev/null
+++ b/CHANGES/5914.misc
@@ -0,0 +1 @@
+Changed the SVG logos to be more optimized and the viewbox to 0 0 24 24, while keeping the same height and width -- :user:`ShadiestGoat`.
diff --git a/CHANGES/5927.feature b/CHANGES/5927.feature
new file mode 100644
index 00000000000..dac4f3e5eb9
--- /dev/null
+++ b/CHANGES/5927.feature
@@ -0,0 +1 @@
+Added support for Python 3.10 to the GitHub Actions CI/CD workflows and fixed the related deprecation warnings -- :user:`Hanaasagi`.
diff --git a/CHANGES/5930.feature b/CHANGES/5930.feature
new file mode 100644
index 00000000000..17cecee40d9
--- /dev/null
+++ b/CHANGES/5930.feature
@@ -0,0 +1 @@
+Switched ``chardet`` to ``charset-normalizer`` for guessing the HTTP payload body encoding -- :user:`Ousret`.
diff --git a/CHANGES/5932.misc b/CHANGES/5932.misc
new file mode 100644
index 00000000000..c9d96ad9361
--- /dev/null
+++ b/CHANGES/5932.misc
@@ -0,0 +1 @@
+Enable auto-merging of Dependabot PRs.
diff --git a/CHANGES/5992.feature b/CHANGES/5992.feature
new file mode 100644
index 00000000000..5667d2de24b
--- /dev/null
+++ b/CHANGES/5992.feature
@@ -0,0 +1,3 @@
+Added support for HTTPS proxies to the extent CPython's
+:py:mod:`asyncio` supports it -- by :user:`bmbouter`,
+:user:`jborean93` and :user:`webknjaz`.
diff --git a/CHANGES/6002.misc b/CHANGES/6002.misc
new file mode 100644
index 00000000000..5df927cf65d
--- /dev/null
+++ b/CHANGES/6002.misc
@@ -0,0 +1,2 @@
+Implemented end-to-end testing of sending HTTP and HTTPS requests
+via ``proxy.py``.
diff --git a/CHANGES/6045.misc b/CHANGES/6045.misc
new file mode 100644
index 00000000000..a27f2d17d4b
--- /dev/null
+++ b/CHANGES/6045.misc
@@ -0,0 +1,3 @@
+Added ``commit``, ``gh``, ``issue``, ``pr``
+and ``user`` RST roles in Sphinx
+-- :user:`webknjaz`.
diff --git a/CHANGES/6079.feature b/CHANGES/6079.feature
new file mode 100644
index 00000000000..25dc6039b44
--- /dev/null
+++ b/CHANGES/6079.feature
@@ -0,0 +1 @@
+Added Trove classifier and created binary wheels for 3.10 -- :user:`hugovk`.
diff --git a/CHANGES/README.rst b/CHANGES/README.rst
new file mode 100644
index 00000000000..c6b5153913a
--- /dev/null
+++ b/CHANGES/README.rst
@@ -0,0 +1,95 @@
+.. _Adding change notes with your PRs:
+
+Adding change notes with your PRs
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+It is very important to maintain a log for news of how
+updating to the new version of the software will affect
+end-users. This is why we enforce collection of the change
+fragment files in pull requests as per `Towncrier philosophy`_.
+
+The idea is that when somebody makes a change, they must record
+the bits that would affect end-users, only including information
+that would be useful to them. Then, when the maintainers publish
+a new release, they'll automatically use these records to compose
+a change log for the respective version. It is important to
+understand that including unnecessary low-level implementation
+related details generates noise that is not particularly useful
+to the end-users most of the time. And so such details should be
+recorded in the Git history rather than a changelog.
+
+Alright! So how to add a news fragment?
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+``aiohttp`` uses `towncrier <https://pypi.org/project/towncrier/>`_
+for changelog management.
+To submit a change note about your PR, add a text file into the
+``CHANGES/`` folder. It should contain an
+explanation of what applying this PR will change in the way
+end-users interact with the project. One sentence is usually
+enough but feel free to add as many details as you feel necessary
+for the users to understand what it means.
+
+**Use the past tense** for the text in your fragment because,
+combined with others, it will be a part of the "news digest"
+telling the readers **what changed** in a specific version of
+the library *since the previous version*. You should also use
+reStructuredText syntax for highlighting code (inline or block),
+linking parts of the docs or external sites.
+If you wish to sign your change, feel free to add ``-- by
+:user:`github-username``` at the end (replace ``github-username``
+with your own!).
+
+Finally, name your file following the convention that Towncrier
+understands: it should start with the number of an issue or a
+PR followed by a dot, then add a patch type, like ``feature``,
+``doc``, ``misc`` etc., and add ``.rst`` as a suffix. If you
+need to add more than one fragment, you may add an optional
+sequence number (delimited with another period) between the type
+and the suffix.
+
+In general the name will follow the ``<issue-or-pr-number>.<category>.rst`` pattern,
+where the categories are:
+
+- ``feature``: Any new feature
+- ``bugfix``: A bug fix
+- ``doc``: A change to the documentation
+- ``misc``: Changes internal to the repo like CI, test and build changes
+- ``removal``: For deprecations and removals of an existing feature or behavior
+
+A pull request may have more than one of these components, for example
+a code change may introduce a new feature that deprecates an old
+feature, in which case two fragments should be added. It is not
+necessary to make a separate documentation fragment for documentation
+changes accompanying the relevant code changes.
+
+Examples for adding changelog entries to your Pull Requests
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+File :file:`CHANGES/6045.doc.1.rst`:
+
+.. code-block:: rst
+
+ Added a ``:user:`` role to Sphinx config -- by :user:`webknjaz`
+
+File :file:`CHANGES/4431.bugfix.rst`:
+
+.. code-block:: rst
+
+ Fixed HTTP client requests to honor ``no_proxy`` environment
+ variables -- by :user:`scirelli`
+
+File :file:`CHANGES/4594.feature.rst`:
+
+.. code-block:: rst
+
+ Added support for ``ETag`` to :py:class:`~aiohttp.web.FileResponse`
+ -- by :user:`greshilov`, :user:`serhiy-storchaka` and :user:`asvetlov`
+
+.. tip::
+
+ See :file:`pyproject.toml` for all available categories
+ (``tool.towncrier.type``).
+
+.. _Towncrier philosophy:
+ https://towncrier.readthedocs.io/en/actual-freaking-docs/#philosophy
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
index f9544ce9b08..f42331eeaef 100644
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -7,6 +7,7 @@ Adam Horacek
Adam Mills
Adrian Krupa
Adrián Chaves
+Ahmed Tahri
Alan Tse
Alec Hanefeld
Alejandro Gómez
@@ -28,6 +29,7 @@ Alexandru Mihai
Alexey Firsov
Alexey Popravka
Alexey Stepanov
+Almaz Salakhov
Amin Etesamian
Amit Tulshyan
Amy Boyle
@@ -47,6 +49,7 @@ Anton Zhdan-Pushkin
Arseny Timoniq
Artem Yushkovskiy
Arthur Darcet
+Austin Scola
Ben Bader
Ben Timby
Benedikt Reinartz
@@ -55,6 +58,7 @@ Boris Feld
Borys Vorona
Boyi Chen
Brett Cannon
+Brian Bouterse
Brian C. Lane
Brian Muller
Bruce Merry
@@ -114,6 +118,7 @@ Felix Yan
Fernanda Guimarães
FichteFoll
Florian Scheffler
+Franek Magiera
Frederik Gladhorn
Frederik Peter Aalund
Gabriel Tremblay
@@ -131,6 +136,7 @@ Hrishikesh Paranjape
Hu Bo
Hugh Young
Hugo Herter
+Hugo van Kemenade
Hynek Schlawack
Igor Alexandrov
Igor Davydenko
@@ -162,6 +168,7 @@ Jonas Obrist
Jonathan Wright
Jonny Tan
Joongi Kim
+Jordan Borean
Josep Cugat
Josh Junon
Joshu Coats
@@ -240,6 +247,7 @@ Pawel Miech
Pepe Osca
Philipp A.
Pieter van Beek
+Qiao Han
Rafael Viotti
Raphael Bialon
Raúl Cumplido
diff --git a/HISTORY.rst b/HISTORY.rst
index 44c1484f917..b3c3b97f886 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -950,7 +950,7 @@ Misc
- Added session's `raise_for_status` parameter, automatically calls
raise_for_status() on any request. (`#1724 `_)
-- `response.json()` raises `ClientReponseError` exception if response's
+- `response.json()` raises `ClientResponseError` exception if response's
content type does not match (`#1723 `_)
- Cleanup timer and loop handle on any client exception.
diff --git a/Makefile b/Makefile
index 0e7189760bb..81b8a0325c3 100644
--- a/Makefile
+++ b/Makefile
@@ -92,10 +92,12 @@ test: .develop
.PHONY: vtest
vtest: .develop
@pytest -s -v
+ @python -X dev -m pytest -s -v -m dev_mode
.PHONY: vvtest
vvtest: .develop
@pytest -vv
+ @python -X dev -m pytest -s -vv -m dev_mode
.PHONY: cov-dev
cov-dev: .develop
diff --git a/README.rst b/README.rst
index 46d93dbed33..143c3a59baf 100644
--- a/README.rst
+++ b/README.rst
@@ -164,17 +164,15 @@ Requirements
- Python >= 3.7
- async-timeout_
-- attrs_
-- chardet_
+- charset-normalizer_
- multidict_
- yarl_
Optionally you may install the cChardet_ and aiodns_ libraries (highly
recommended for sake of speed).
-.. _chardet: https://pypi.python.org/pypi/chardet
+.. _charset-normalizer: https://pypi.org/project/charset-normalizer
.. _aiodns: https://pypi.python.org/pypi/aiodns
-.. _attrs: https://github.com/python-attrs/attrs
.. _multidict: https://pypi.python.org/pypi/multidict
.. _yarl: https://pypi.python.org/pypi/yarl
.. _async-timeout: https://pypi.python.org/pypi/async_timeout
diff --git a/aiohttp/_http_writer.pyx b/aiohttp/_http_writer.pyx
index 84b42fa1c35..eff85219586 100644
--- a/aiohttp/_http_writer.pyx
+++ b/aiohttp/_http_writer.pyx
@@ -111,6 +111,14 @@ cdef str to_str(object s):
return str(s)
+cdef void _safe_header(str string) except *:
+ if "\r" in string or "\n" in string:
+ raise ValueError(
+ "Newline or carriage return character detected in HTTP status message or "
+ "header. This is a potential security issue."
+ )
+
+
def _serialize_headers(str status_line, headers):
cdef Writer writer
cdef object key
@@ -119,6 +127,10 @@ def _serialize_headers(str status_line, headers):
_init_writer(&writer)
+ for key, val in headers.items():
+ _safe_header(to_str(key))
+ _safe_header(to_str(val))
+
try:
if _write_str(&writer, status_line) < 0:
raise
diff --git a/aiohttp/client.py b/aiohttp/client.py
index 62b18d07ff6..c14a682a404 100644
--- a/aiohttp/client.py
+++ b/aiohttp/client.py
@@ -154,7 +154,7 @@ class ClientTimeout:
# to create a timeout specific for a single request, either
# - create a completely new one to overwrite the default
- # - or use http://www.attrs.org/en/stable/api.html#attr.evolve
+ # - or use https://docs.python.org/3/library/dataclasses.html#dataclasses.replace
# to overwrite the defaults
@@ -263,7 +263,7 @@ def __init__(
real_headers = CIMultiDict()
self._default_headers = real_headers # type: CIMultiDict[str]
if skip_auto_headers is not None:
- self._skip_auto_headers = frozenset([istr(i) for i in skip_auto_headers])
+ self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers)
else:
self._skip_auto_headers = frozenset()
@@ -566,7 +566,16 @@ async def _request(
elif not scheme:
parsed_url = url.join(parsed_url)
- if url.origin() != parsed_url.origin():
+ is_same_host_https_redirect = (
+ url.host == parsed_url.host
+ and parsed_url.scheme == "https"
+ and url.scheme == "http"
+ )
+
+ if (
+ url.origin() != parsed_url.origin()
+ and not is_same_host_https_redirect
+ ):
auth = None
headers.pop(hdrs.AUTHORIZATION, None)
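
A minimal sketch (not the library API) of the redirect rule introduced above: the ``Authorization`` header now survives only a same-host ``http`` to ``https`` upgrade, while every other cross-origin redirect still drops it.

.. code-block:: python

    from yarl import URL


    def keeps_authorization(original: URL, redirected: URL) -> bool:
        """Mirror of the same-host HTTP -> HTTPS redirect check above."""
        if original.origin() == redirected.origin():
            return True  # same origin, nothing to strip
        return (
            original.host == redirected.host
            and original.scheme == "http"
            and redirected.scheme == "https"
        )


    assert keeps_authorization(URL("http://example.com/a"), URL("https://example.com/b"))
    assert not keeps_authorization(URL("http://example.com/a"), URL("https://evil.example/b"))
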
diff --git a/aiohttp/client_exceptions.py b/aiohttp/client_exceptions.py
index 808c1cc614e..5f2f8958e84 100644
--- a/aiohttp/client_exceptions.py
+++ b/aiohttp/client_exceptions.py
@@ -85,7 +85,7 @@ def __repr__(self) -> str:
args += f", message={self.message!r}"
if self.headers is not None:
args += f", headers={self.headers!r}"
- return "{}({})".format(type(self).__name__, args)
+ return f"{type(self).__name__}({args})"
class ContentTypeError(ClientResponseError):
diff --git a/aiohttp/client_reqrep.py b/aiohttp/client_reqrep.py
index c63b73bcdf8..41602afe703 100644
--- a/aiohttp/client_reqrep.py
+++ b/aiohttp/client_reqrep.py
@@ -70,7 +70,7 @@
try:
import cchardet as chardet
except ImportError: # pragma: no cover
- import chardet # type: ignore[no-redef]
+ import charset_normalizer as chardet # type: ignore[no-redef]
__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint")
@@ -481,8 +481,6 @@ def update_proxy(
proxy_auth: Optional[BasicAuth],
proxy_headers: Optional[LooseHeaders],
) -> None:
- if proxy and not proxy.scheme == "http":
- raise ValueError("Only http proxies are supported")
if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
raise ValueError("proxy_auth must be None or BasicAuth() tuple")
self.proxy = proxy
diff --git a/aiohttp/connector.py b/aiohttp/connector.py
index 58c528de6fb..f1cae659f5e 100644
--- a/aiohttp/connector.py
+++ b/aiohttp/connector.py
@@ -150,7 +150,7 @@ def closed(self) -> bool:
class _TransportPlaceholder:
- """ placeholder for BaseConnector.connect function """
+ """placeholder for BaseConnector.connect function"""
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
fut = loop.create_future()
@@ -876,9 +876,11 @@ def _make_ssl_context(verified: bool) -> SSLContext:
if verified:
return ssl.create_default_context()
else:
- sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+ sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
sslcontext.options |= ssl.OP_NO_SSLv2
sslcontext.options |= ssl.OP_NO_SSLv3
+ sslcontext.check_hostname = False
+ sslcontext.verify_mode = ssl.CERT_NONE
try:
sslcontext.options |= ssl.OP_NO_COMPRESSION
except AttributeError as attr_err:
@@ -951,6 +953,100 @@ async def _wrap_create_connection(
except OSError as exc:
raise client_error(req.connection_key, exc) from exc
+ def _warn_about_tls_in_tls(
+ self,
+ underlying_transport: asyncio.Transport,
+ req: "ClientRequest",
+ ) -> None:
+ """Issue a warning if the requested URL has HTTPS scheme."""
+ if req.request_info.url.scheme != "https":
+ return
+
+ asyncio_supports_tls_in_tls = getattr(
+ underlying_transport,
+ "_start_tls_compatible",
+ False,
+ )
+
+ if asyncio_supports_tls_in_tls:
+ return
+
+ warnings.warn(
+ "An HTTPS request is being sent through an HTTPS proxy. "
+ "This support for TLS in TLS is known to be disabled "
+ "in the stdlib asyncio. This is why you'll probably see "
+ "an error in the log below.\n\n"
+ "It is possible to enable it via monkeypatching under "
+ "Python 3.7 or higher. For more details, see:\n"
+ "* https://bugs.python.org/issue37179\n"
+ "* https://github.com/python/cpython/pull/28073\n\n"
+ "You can temporarily patch this as follows:\n"
+ "* https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support\n"
+ "* https://github.com/aio-libs/aiohttp/discussions/6044\n",
+ RuntimeWarning,
+ source=self,
+ # Why `4`? At least 3 of the calls in the stack originate
+ # from the methods in this class.
+ stacklevel=3,
+ )
+
+ async def _start_tls_connection(
+ self,
+ underlying_transport: asyncio.Transport,
+ req: "ClientRequest",
+ timeout: "ClientTimeout",
+ client_error: Type[Exception] = ClientConnectorError,
+ ) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
+ """Wrap the raw TCP transport with TLS."""
+ tls_proto = self._factory() # Create a brand new proto for TLS
+
+ # Safety of the `cast()` call here is based on the fact that
+ # internally `_get_ssl_context()` only returns `None` when
+ # `req.is_ssl()` evaluates to `False` which is never gonna happen
+ # in this code path. Of course, it's rather fragile
+ # maintainability-wise but this is to be solved separately.
+ sslcontext = cast(ssl.SSLContext, self._get_ssl_context(req))
+
+ try:
+ async with ceil_timeout(timeout.sock_connect):
+ try:
+ tls_transport = await self._loop.start_tls(
+ underlying_transport,
+ tls_proto,
+ sslcontext,
+ server_hostname=req.host,
+ ssl_handshake_timeout=timeout.total,
+ )
+ except BaseException:
+ # We need to close the underlying transport since
+ # `start_tls()` probably failed before it had a
+ # chance to do this:
+ underlying_transport.close()
+ raise
+ except cert_errors as exc:
+ raise ClientConnectorCertificateError(req.connection_key, exc) from exc
+ except ssl_errors as exc:
+ raise ClientConnectorSSLError(req.connection_key, exc) from exc
+ except OSError as exc:
+ raise client_error(req.connection_key, exc) from exc
+ except TypeError as type_err:
+ # Example cause looks like this:
+ # TypeError: transport is not supported by start_tls()
+
+ raise ClientConnectionError(
+ "Cannot initialize a TLS-in-TLS connection to host "
+ f"{req.host!s}:{req.port:d} through an underlying connection "
+ f"to an HTTPS proxy {req.proxy!s} ssl:{req.ssl or 'default'} "
+ f"[{type_err!s}]"
+ ) from type_err
+ else:
+ tls_proto.connection_made(
+ tls_transport
+ ) # Kick the state machine of the new TLS protocol
+
+ return tls_transport, tls_proto
+
async def _create_direct_connection(
self,
req: "ClientRequest",
@@ -1028,7 +1124,7 @@ def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None:
async def _create_proxy_connection(
self, req: "ClientRequest", traces: List["Trace"], timeout: "ClientTimeout"
- ) -> Tuple[asyncio.Transport, ResponseHandler]:
+ ) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
headers = {} # type: Dict[str, str]
if req.proxy_headers is not None:
headers = req.proxy_headers # type: ignore[assignment]
@@ -1063,7 +1159,8 @@ async def _create_proxy_connection(
proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth
if req.is_ssl():
- sslcontext = self._get_ssl_context(req)
+ self._warn_about_tls_in_tls(transport, req)
+
# For HTTPS requests over HTTP proxy
# we must notify proxy to tunnel connection
# so we send CONNECT command:
@@ -1083,7 +1180,11 @@ async def _create_proxy_connection(
try:
protocol = conn._protocol
assert protocol is not None
- protocol.set_response_params()
+
+ # read_until_eof=True will ensure the connection isn't closed
+ # once the response is received and processed allowing
+ # START_TLS to work on the connection below.
+ protocol.set_response_params(read_until_eof=True)
resp = await proxy_resp.start(conn)
except BaseException:
proxy_resp.close()
@@ -1104,21 +1205,19 @@ async def _create_proxy_connection(
message=message,
headers=resp.headers,
)
- rawsock = transport.get_extra_info("socket", default=None)
- if rawsock is None:
- raise RuntimeError("Transport does not expose socket instance")
- # Duplicate the socket, so now we can close proxy transport
- rawsock = rawsock.dup()
- finally:
+ except BaseException:
+ # It shouldn't be closed in `finally` because it's fed to
+ # `loop.start_tls()` and the docs say not to touch it after
+ # passing there.
transport.close()
+ raise
- transport, proto = await self._wrap_create_connection(
- self._factory,
- timeout=timeout,
- ssl=sslcontext,
- sock=rawsock,
- server_hostname=req.host,
+ return await self._start_tls_connection(
+ # Access the old transport for the last time before it's
+ # closed and forgotten forever:
+ transport,
req=req,
+ timeout=timeout,
)
finally:
proxy_resp.close()
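
For readers unfamiliar with ``loop.start_tls()``, here is a rough, self-contained sketch of the upgrade pattern that ``_start_tls_connection()`` relies on; the host name is a placeholder and error handling is reduced to the essential close-on-failure step.

.. code-block:: python

    import asyncio
    import ssl


    async def upgrade_to_tls(host: str = "example.org", port: int = 443) -> None:
        loop = asyncio.get_running_loop()
        # Plain TCP connection first (in aiohttp this is the tunnel to the proxy).
        transport, protocol = await loop.create_connection(asyncio.Protocol, host, port)
        sslcontext = ssl.create_default_context()
        try:
            tls_transport = await loop.start_tls(
                transport, protocol, sslcontext, server_hostname=host
            )
        except BaseException:
            # start_tls() may fail before it gets a chance to close the raw
            # transport, so close it here, just like the code above does.
            transport.close()
            raise
        tls_transport.close()


    # asyncio.run(upgrade_to_tls())  # needs network access to run
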
diff --git a/aiohttp/cookiejar.py b/aiohttp/cookiejar.py
index 8d0a1edf49b..ef32f5faa53 100644
--- a/aiohttp/cookiejar.py
+++ b/aiohttp/cookiejar.py
@@ -320,7 +320,7 @@ def _parse_date(cls, date_str: str) -> Optional[datetime.datetime]:
time_match = cls.DATE_HMS_TIME_RE.match(token)
if time_match:
found_time = True
- hour, minute, second = [int(s) for s in time_match.groups()]
+ hour, minute, second = (int(s) for s in time_match.groups())
continue
if not found_day:
diff --git a/aiohttp/helpers.py b/aiohttp/helpers.py
index 418de0f6f9a..948b12f99d8 100644
--- a/aiohttp/helpers.py
+++ b/aiohttp/helpers.py
@@ -13,6 +13,7 @@
import re
import sys
import time
+import warnings
import weakref
from collections import namedtuple
from contextlib import suppress
@@ -54,7 +55,9 @@
__all__ = ("BasicAuth", "ChainMapProxy", "ETag")
PY_38 = sys.version_info >= (3, 8)
+PY_310 = sys.version_info >= (3, 10)
+COOKIE_MAX_LENGTH = 4096
try:
from typing import ContextManager
@@ -499,7 +502,7 @@ def _is_ip_address(
elif isinstance(host, (bytes, bytearray, memoryview)):
return bool(regexb.match(host))
else:
- raise TypeError("{} [{}] is not a str or bytes".format(host, type(host)))
+ raise TypeError(f"{host} [{type(host)}] is not a str or bytes")
is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb)
@@ -593,7 +596,7 @@ def call_later(
class TimeoutHandle:
- """ Timeout handle """
+ """Timeout handle"""
def __init__(
self, loop: asyncio.AbstractEventLoop, timeout: Optional[float]
@@ -656,7 +659,7 @@ def __exit__(
class TimerContext(BaseTimerContext):
- """ Low resolution timeout context manager """
+ """Low resolution timeout context manager"""
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
self._loop = loop
@@ -876,6 +879,15 @@ def set_cookie(
if samesite is not None:
c["samesite"] = samesite
+ if DEBUG:
+ cookie_length = len(c.output(header="")[1:])
+ if cookie_length > COOKIE_MAX_LENGTH:
+ warnings.warn(
+ "The size of is too large, it might get ignored by the client.",
+ UserWarning,
+ stacklevel=2,
+ )
+
def del_cookie(
self, name: str, *, domain: Optional[str] = None, path: str = "/"
) -> None:
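
A small, hypothetical demonstration of the condition that triggers the new ``DEBUG``-mode warning: the serialized cookie below exceeds the 4096-byte ``COOKIE_MAX_LENGTH`` threshold defined above.

.. code-block:: python

    from http.cookies import SimpleCookie

    COOKIE_MAX_LENGTH = 4096  # RFC 6265 minimum size a client must support

    jar: SimpleCookie = SimpleCookie()
    jar["session"] = "x" * 5000
    cookie_length = len(jar.output(header="")[1:])
    print(cookie_length > COOKIE_MAX_LENGTH)  # True -> the warning would fire in DEBUG mode
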
diff --git a/aiohttp/http_parser.py b/aiohttp/http_parser.py
index 1045b6c0926..3feac7ac1bb 100644
--- a/aiohttp/http_parser.py
+++ b/aiohttp/http_parser.py
@@ -316,7 +316,7 @@ def feed_data(
finally:
self._lines.clear()
- def get_content_lenght() -> Optional[int]:
+ def get_content_length() -> Optional[int]:
# payload length
length_hdr = msg.headers.get(CONTENT_LENGTH)
if length_hdr is None:
@@ -332,7 +332,7 @@ def get_content_lenght() -> Optional[int]:
return length
- length = get_content_lenght()
+ length = get_content_length()
# do not support old websocket spec
if SEC_WEBSOCKET_KEY1 in msg.headers:
raise InvalidHeader(SEC_WEBSOCKET_KEY1)
diff --git a/aiohttp/http_writer.py b/aiohttp/http_writer.py
index f859790efdd..428a7929b1a 100644
--- a/aiohttp/http_writer.py
+++ b/aiohttp/http_writer.py
@@ -171,13 +171,19 @@ async def drain(self) -> None:
await self._protocol._drain_helper()
+def _safe_header(string: str) -> str:
+ if "\r" in string or "\n" in string:
+ raise ValueError(
+ "Newline or carriage return detected in headers. "
+ "Potential header injection attack."
+ )
+ return string
+
+
def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes:
- line = (
- status_line
- + "\r\n"
- + "".join([k + ": " + v + "\r\n" for k, v in headers.items()])
- )
- return line.encode("utf-8") + b"\r\n"
+ headers_gen = (_safe_header(k) + ": " + _safe_header(v) for k, v in headers.items())
+ line = status_line + "\r\n" + "\r\n".join(headers_gen) + "\r\n\r\n"
+ return line.encode("utf-8")
_serialize_headers = _py_serialize_headers
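
A standalone sketch of the serialization and injection check added above, using the same building blocks (``CIMultiDict`` from ``multidict``); the header names are arbitrary examples.

.. code-block:: python

    from multidict import CIMultiDict


    def safe_header(value: str) -> str:
        # Reject CR/LF so a value cannot smuggle extra header lines.
        if "\r" in value or "\n" in value:
            raise ValueError("Newline or carriage return detected in headers.")
        return value


    def serialize(status_line: str, headers: "CIMultiDict[str]") -> bytes:
        gen = (safe_header(k) + ": " + safe_header(v) for k, v in headers.items())
        return (status_line + "\r\n" + "\r\n".join(gen) + "\r\n\r\n").encode("utf-8")


    print(serialize("HTTP/1.1 200 OK", CIMultiDict({"X-Demo": "ok"})))
    # serialize("HTTP/1.1 200 OK", CIMultiDict({"X-Demo": "a\r\nSet-Cookie: x=1"}))
    # would raise ValueError instead of emitting a forged Set-Cookie header.
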
diff --git a/aiohttp/locks.py b/aiohttp/locks.py
index ce5b9c6f731..8ea99d70ce8 100644
--- a/aiohttp/locks.py
+++ b/aiohttp/locks.py
@@ -40,6 +40,6 @@ async def wait(self) -> Any:
return val
def cancel(self) -> None:
- """ Cancel all waiters """
+ """Cancel all waiters"""
for waiter in self._waiters:
waiter.cancel()
diff --git a/aiohttp/multipart.py b/aiohttp/multipart.py
index b5e78c835e6..b2c67baa45d 100644
--- a/aiohttp/multipart.py
+++ b/aiohttp/multipart.py
@@ -154,7 +154,7 @@ def unescape(text: str, *, chars: str = "".join(map(re.escape, CHAR))) -> str:
elif parts:
# maybe just ; in filename, in any case this is just
# one case fix, for proper fix we need to redesign parser
- _value = "{};{}".format(value, parts[0])
+ _value = f"{value};{parts[0]}"
if is_quoted(_value):
parts.pop(0)
value = unescape(_value[1:-1].lstrip("\\/"))
diff --git a/aiohttp/payload.py b/aiohttp/payload.py
index b88da2cd8ed..ace3dc2b995 100644
--- a/aiohttp/payload.py
+++ b/aiohttp/payload.py
@@ -15,7 +15,6 @@
Dict,
Iterable,
Optional,
- Text,
TextIO,
Tuple,
Type,
@@ -221,9 +220,7 @@ async def write(self, writer: AbstractStreamWriter) -> None:
class BytesPayload(Payload):
def __init__(self, value: ByteString, *args: Any, **kwargs: Any) -> None:
if not isinstance(value, (bytes, bytearray, memoryview)):
- raise TypeError(
- "value argument must be byte-ish, not {!r}".format(type(value))
- )
+ raise TypeError(f"value argument must be byte-ish, not {type(value)!r}")
if "content_type" not in kwargs:
kwargs["content_type"] = "application/octet-stream"
@@ -251,7 +248,7 @@ async def write(self, writer: AbstractStreamWriter) -> None:
class StringPayload(BytesPayload):
def __init__(
self,
- value: Text,
+ value: str,
*args: Any,
encoding: Optional[str] = None,
content_type: Optional[str] = None,
diff --git a/aiohttp/streams.py b/aiohttp/streams.py
index a077b81b82d..185f46ecdab 100644
--- a/aiohttp/streams.py
+++ b/aiohttp/streams.py
@@ -480,7 +480,7 @@ def _read_nowait_chunk(self, n: int) -> bytes:
return data
def _read_nowait(self, n: int) -> bytes:
- """ Read not more than n bytes, or whole buffer if n == -1 """
+ """Read not more than n bytes, or whole buffer if n == -1"""
chunks = []
while self._buffer:
diff --git a/aiohttp/test_utils.py b/aiohttp/test_utils.py
index e8ebb6d4ed0..48c276791f8 100644
--- a/aiohttp/test_utils.py
+++ b/aiohttp/test_utils.py
@@ -428,8 +428,10 @@ def get_app(self) -> Application:
raise RuntimeError("Did you forget to define get_application()?")
def setUp(self) -> None:
- if PY_38:
- self.loop = asyncio.get_event_loop()
+ try:
+ self.loop = asyncio.get_running_loop()
+ except RuntimeError:
+ self.loop = asyncio.get_event_loop_policy().get_event_loop()
self.loop.run_until_complete(self.setUpAsync())
@@ -490,7 +492,16 @@ def setup_test_loop(
asyncio.set_event_loop(loop)
if sys.platform != "win32" and not skip_watcher:
policy = asyncio.get_event_loop_policy()
- watcher = asyncio.SafeChildWatcher()
+ watcher: asyncio.AbstractChildWatcher
+ try: # Python >= 3.8
+ # Refs:
+ # * https://github.com/pytest-dev/pytest-xdist/issues/620
+ # * https://stackoverflow.com/a/58614689/595220
+ # * https://bugs.python.org/issue35621
+ # * https://github.com/python/cpython/pull/14344
+ watcher = asyncio.ThreadedChildWatcher()
+ except AttributeError: # Python < 3.8
+ watcher = asyncio.SafeChildWatcher()
watcher.attach_loop(loop)
with contextlib.suppress(NotImplementedError):
policy.set_child_watcher(watcher)
diff --git a/aiohttp/tracing.py b/aiohttp/tracing.py
index 435bf3ddf73..7ffe93f8507 100644
--- a/aiohttp/tracing.py
+++ b/aiohttp/tracing.py
@@ -107,7 +107,7 @@ def __init__(
def trace_config_ctx(
self, trace_request_ctx: Optional[SimpleNamespace] = None
) -> SimpleNamespace:
- """ Return a new trace_config_ctx instance """
+ """Return a new trace_config_ctx instance"""
return self._trace_config_ctx_factory(trace_request_ctx=trace_request_ctx)
def freeze(self) -> None:
@@ -219,7 +219,7 @@ def on_request_headers_sent(
@dataclasses.dataclass(frozen=True)
class TraceRequestStartParams:
- """ Parameters sent by the `on_request_start` signal"""
+ """Parameters sent by the `on_request_start` signal"""
method: str
url: URL
@@ -228,7 +228,7 @@ class TraceRequestStartParams:
@dataclasses.dataclass(frozen=True)
class TraceRequestChunkSentParams:
- """ Parameters sent by the `on_request_chunk_sent` signal"""
+ """Parameters sent by the `on_request_chunk_sent` signal"""
method: str
url: URL
@@ -237,7 +237,7 @@ class TraceRequestChunkSentParams:
@dataclasses.dataclass(frozen=True)
class TraceResponseChunkReceivedParams:
- """ Parameters sent by the `on_response_chunk_received` signal"""
+ """Parameters sent by the `on_response_chunk_received` signal"""
method: str
url: URL
@@ -246,7 +246,7 @@ class TraceResponseChunkReceivedParams:
@dataclasses.dataclass(frozen=True)
class TraceRequestEndParams:
- """ Parameters sent by the `on_request_end` signal"""
+ """Parameters sent by the `on_request_end` signal"""
method: str
url: URL
@@ -256,7 +256,7 @@ class TraceRequestEndParams:
@dataclasses.dataclass(frozen=True)
class TraceRequestExceptionParams:
- """ Parameters sent by the `on_request_exception` signal"""
+ """Parameters sent by the `on_request_exception` signal"""
method: str
url: URL
@@ -266,7 +266,7 @@ class TraceRequestExceptionParams:
@dataclasses.dataclass(frozen=True)
class TraceRequestRedirectParams:
- """ Parameters sent by the `on_request_redirect` signal"""
+ """Parameters sent by the `on_request_redirect` signal"""
method: str
url: URL
@@ -276,60 +276,60 @@ class TraceRequestRedirectParams:
@dataclasses.dataclass(frozen=True)
class TraceConnectionQueuedStartParams:
- """ Parameters sent by the `on_connection_queued_start` signal"""
+ """Parameters sent by the `on_connection_queued_start` signal"""
@dataclasses.dataclass(frozen=True)
class TraceConnectionQueuedEndParams:
- """ Parameters sent by the `on_connection_queued_end` signal"""
+ """Parameters sent by the `on_connection_queued_end` signal"""
@dataclasses.dataclass(frozen=True)
class TraceConnectionCreateStartParams:
- """ Parameters sent by the `on_connection_create_start` signal"""
+ """Parameters sent by the `on_connection_create_start` signal"""
@dataclasses.dataclass(frozen=True)
class TraceConnectionCreateEndParams:
- """ Parameters sent by the `on_connection_create_end` signal"""
+ """Parameters sent by the `on_connection_create_end` signal"""
@dataclasses.dataclass(frozen=True)
class TraceConnectionReuseconnParams:
- """ Parameters sent by the `on_connection_reuseconn` signal"""
+ """Parameters sent by the `on_connection_reuseconn` signal"""
@dataclasses.dataclass(frozen=True)
class TraceDnsResolveHostStartParams:
- """ Parameters sent by the `on_dns_resolvehost_start` signal"""
+ """Parameters sent by the `on_dns_resolvehost_start` signal"""
host: str
@dataclasses.dataclass(frozen=True)
class TraceDnsResolveHostEndParams:
- """ Parameters sent by the `on_dns_resolvehost_end` signal"""
+ """Parameters sent by the `on_dns_resolvehost_end` signal"""
host: str
@dataclasses.dataclass(frozen=True)
class TraceDnsCacheHitParams:
- """ Parameters sent by the `on_dns_cache_hit` signal"""
+ """Parameters sent by the `on_dns_cache_hit` signal"""
host: str
@dataclasses.dataclass(frozen=True)
class TraceDnsCacheMissParams:
- """ Parameters sent by the `on_dns_cache_miss` signal"""
+ """Parameters sent by the `on_dns_cache_miss` signal"""
host: str
@dataclasses.dataclass(frozen=True)
class TraceRequestHeadersSentParams:
- """ Parameters sent by the `on_request_headers_sent` signal"""
+ """Parameters sent by the `on_request_headers_sent` signal"""
method: str
url: URL
diff --git a/aiohttp/typedefs.py b/aiohttp/typedefs.py
index ab2d7035f2e..1b13a4dbd0f 100644
--- a/aiohttp/typedefs.py
+++ b/aiohttp/typedefs.py
@@ -1,6 +1,15 @@
import json
import os
-from typing import TYPE_CHECKING, Any, Callable, Iterable, Mapping, Tuple, Union
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Awaitable,
+ Callable,
+ Iterable,
+ Mapping,
+ Tuple,
+ Union,
+)
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy, istr
from yarl import URL
@@ -14,6 +23,8 @@
_MultiDict = MultiDict[str]
_MultiDictProxy = MultiDictProxy[str]
from http.cookies import BaseCookie, Morsel
+
+ from .web import Request, StreamResponse
else:
_CIMultiDict = CIMultiDict
_CIMultiDictProxy = CIMultiDictProxy
@@ -37,5 +48,6 @@
"BaseCookie[str]",
]
+Handler = Callable[["Request"], Awaitable["StreamResponse"]]
PathLike = Union[str, "os.PathLike[str]"]
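
A hedged usage example for the new ``Handler`` alias: annotating a middleware, which is exactly the pattern the ``web_middlewares.py`` changes below switch to. The middleware itself is invented for illustration.

.. code-block:: python

    from aiohttp import web
    from aiohttp.typedefs import Handler


    @web.middleware
    async def server_header_middleware(
        request: web.Request, handler: Handler
    ) -> web.StreamResponse:
        response = await handler(request)
        response.headers.setdefault("Server", "demo")
        return response


    app = web.Application(middlewares=[server_header_middleware])
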
diff --git a/aiohttp/web.py b/aiohttp/web.py
index 0062076268f..5aef0c00e5f 100644
--- a/aiohttp/web.py
+++ b/aiohttp/web.py
@@ -441,9 +441,7 @@ def _cancel_tasks(
for task in to_cancel:
task.cancel()
- loop.run_until_complete(
- asyncio.gather(*to_cancel, loop=loop, return_exceptions=True)
- )
+ loop.run_until_complete(asyncio.gather(*to_cancel, return_exceptions=True))
for task in to_cancel:
if task.cancelled():
diff --git a/aiohttp/web_app.py b/aiohttp/web_app.py
index 15d3c430e5e..999ea9ceb95 100644
--- a/aiohttp/web_app.py
+++ b/aiohttp/web_app.py
@@ -46,10 +46,11 @@
if TYPE_CHECKING: # pragma: no cover
+ from .typedefs import Handler
+
_AppSignal = Signal[Callable[["Application"], Awaitable[None]]]
_RespPrepareSignal = Signal[Callable[[Request, StreamResponse], Awaitable[None]]]
- _Handler = Callable[[Request], Awaitable[StreamResponse]]
- _Middleware = Callable[[Request, _Handler], Awaitable[StreamResponse]]
+ _Middleware = Callable[[Request, Handler], Awaitable[StreamResponse]]
_Middlewares = FrozenList[_Middleware]
_MiddlewaresHandlers = Sequence[_Middleware]
_Subapps = List["Application"]
@@ -358,7 +359,7 @@ def __call__(self) -> "Application":
return self
def __repr__(self) -> str:
- return "".format(id(self))
+ return f""
def __bool__(self) -> bool:
return True
diff --git a/aiohttp/web_exceptions.py b/aiohttp/web_exceptions.py
index 28fe0aafcc5..b22995f39ac 100644
--- a/aiohttp/web_exceptions.py
+++ b/aiohttp/web_exceptions.py
@@ -431,7 +431,7 @@ def __init__(
super().__init__(
headers=headers, reason=reason, text=text, content_type=content_type
)
- self.headers["Link"] = '<{}>; rel="blocked-by"'.format(str(link))
+ self.headers["Link"] = f'<{str(link)}>; rel="blocked-by"'
self._link = URL(link)
@property
diff --git a/aiohttp/web_middlewares.py b/aiohttp/web_middlewares.py
index 3b040e8499e..4d28ff76307 100644
--- a/aiohttp/web_middlewares.py
+++ b/aiohttp/web_middlewares.py
@@ -2,6 +2,7 @@
import warnings
from typing import TYPE_CHECKING, Awaitable, Callable, Tuple, Type, TypeVar
+from .typedefs import Handler
from .web_exceptions import HTTPMove, HTTPPermanentRedirect
from .web_request import Request
from .web_response import StreamResponse
@@ -41,8 +42,7 @@ def middleware(f: _Func) -> _Func:
return f
-_Handler = Callable[[Request], Awaitable[StreamResponse]]
-_Middleware = Callable[[Request, _Handler], Awaitable[StreamResponse]]
+_Middleware = Callable[[Request, Handler], Awaitable[StreamResponse]]
def normalize_path_middleware(
@@ -85,7 +85,7 @@ def normalize_path_middleware(
correct_configuration = not (append_slash and remove_slash)
assert correct_configuration, "Cannot both remove and append slash"
- async def impl(request: Request, handler: _Handler) -> StreamResponse:
+ async def impl(request: Request, handler: Handler) -> StreamResponse:
if isinstance(request.match_info.route, SystemRoute):
paths_to_check = []
if "?" in request.raw_path:
@@ -119,7 +119,7 @@ async def impl(request: Request, handler: _Handler) -> StreamResponse:
def _fix_request_current_app(app: "Application") -> _Middleware:
- async def impl(request: Request, handler: _Handler) -> StreamResponse:
+ async def impl(request: Request, handler: Handler) -> StreamResponse:
with request.match_info.set_current_app(app):
return await handler(request)
diff --git a/aiohttp/web_protocol.py b/aiohttp/web_protocol.py
index f2f0d8866c3..aab4f31f297 100644
--- a/aiohttp/web_protocol.py
+++ b/aiohttp/web_protocol.py
@@ -1,5 +1,6 @@
import asyncio
import asyncio.streams
+import dataclasses
import traceback
from collections import deque
from contextlib import suppress
@@ -20,7 +21,6 @@
cast,
)
-import attr
import yarl
from .abc import AbstractAccessLogger, AbstractAsyncAccessLogger, AbstractStreamWriter
@@ -105,7 +105,7 @@ async def log(
self.access_logger.log(request, response, self._loop.time() - request_start)
-@attr.s(auto_attribs=True, frozen=True, slots=True)
+@dataclasses.dataclass(frozen=True)
class _ErrInfo:
status: int
exc: BaseException
diff --git a/aiohttp/web_routedef.py b/aiohttp/web_routedef.py
index 3ecbd943fc3..a22c3df4d0e 100644
--- a/aiohttp/web_routedef.py
+++ b/aiohttp/web_routedef.py
@@ -4,7 +4,6 @@
from typing import (
TYPE_CHECKING,
Any,
- Awaitable,
Callable,
Dict,
Iterator,
@@ -18,7 +17,7 @@
from . import hdrs
from .abc import AbstractView
-from .typedefs import PathLike
+from .typedefs import Handler, PathLike
if TYPE_CHECKING: # pragma: no cover
from .web_request import Request
@@ -52,8 +51,7 @@ def register(self, router: UrlDispatcher) -> List[AbstractRoute]:
pass # pragma: no cover
-_SimpleHandler = Callable[[Request], Awaitable[StreamResponse]]
-_HandlerType = Union[Type[AbstractView], _SimpleHandler]
+_HandlerType = Union[Type[AbstractView], Handler]
@dataclasses.dataclass(frozen=True, repr=False)
@@ -160,7 +158,7 @@ def __init__(self) -> None:
self._items = [] # type: List[AbstractRouteDef]
def __repr__(self) -> str:
- return "".format(len(self._items))
+ return f""
@overload
def __getitem__(self, index: int) -> AbstractRouteDef:
diff --git a/aiohttp/web_urldispatcher.py b/aiohttp/web_urldispatcher.py
index a229cd7e4ed..3e9a2c22392 100644
--- a/aiohttp/web_urldispatcher.py
+++ b/aiohttp/web_urldispatcher.py
@@ -36,7 +36,7 @@
from .abc import AbstractMatchInfo, AbstractRouter, AbstractView
from .helpers import DEBUG, iscoroutinefunction
from .http import HttpVersion11
-from .typedefs import PathLike
+from .typedefs import Handler, PathLike
from .web_exceptions import (
HTTPException,
HTTPExpectationFailed,
@@ -81,7 +81,6 @@
PATH_SEP: Final[str] = re.escape("/")
-_WebHandler = Callable[[Request], Awaitable[StreamResponse]]
_ExpectHandler = Callable[[Request], Awaitable[None]]
_Resolve = Tuple[Optional[AbstractMatchInfo], Set[str]]
@@ -156,7 +155,7 @@ class AbstractRoute(abc.ABC):
def __init__(
self,
method: str,
- handler: Union[_WebHandler, Type[AbstractView]],
+ handler: Union[Handler, Type[AbstractView]],
*,
expect_handler: Optional[_ExpectHandler] = None,
resource: Optional[AbstractResource] = None,
@@ -193,7 +192,7 @@ def method(self) -> str:
return self._method
@property
- def handler(self) -> _WebHandler:
+ def handler(self) -> Handler:
return self._handler
@property
@@ -226,7 +225,7 @@ def __init__(self, match_dict: Dict[str, str], route: AbstractRoute):
self._frozen = False
@property
- def handler(self) -> _WebHandler:
+ def handler(self) -> Handler:
return self._route.handler
@property
@@ -321,7 +320,7 @@ def __init__(self, *, name: Optional[str] = None) -> None:
def add_route(
self,
method: str,
- handler: Union[Type[AbstractView], _WebHandler],
+ handler: Union[Type[AbstractView], Handler],
*,
expect_handler: Optional[_ExpectHandler] = None,
) -> "ResourceRoute":
@@ -606,7 +605,7 @@ def get_info(self) -> _InfoDict:
"routes": self._routes,
}
- def set_options_route(self, handler: _WebHandler) -> None:
+ def set_options_route(self, handler: Handler) -> None:
if "OPTIONS" in self._routes:
raise RuntimeError("OPTIONS route was set already")
self._routes["OPTIONS"] = ResourceRoute(
@@ -863,7 +862,7 @@ class ResourceRoute(AbstractRoute):
def __init__(
self,
method: str,
- handler: Union[_WebHandler, Type[AbstractView]],
+ handler: Union[Handler, Type[AbstractView]],
resource: AbstractResource,
*,
expect_handler: Optional[_ExpectHandler] = None,
@@ -1073,7 +1072,7 @@ def add_route(
self,
method: str,
path: str,
- handler: Union[_WebHandler, Type[AbstractView]],
+ handler: Union[Handler, Type[AbstractView]],
*,
name: Optional[str] = None,
expect_handler: Optional[_ExpectHandler] = None,
@@ -1115,15 +1114,13 @@ def add_static(
self.register_resource(resource)
return resource
- def add_head(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute:
+ def add_head(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""
Shortcut for add_route with method HEAD
"""
return self.add_route(hdrs.METH_HEAD, path, handler, **kwargs)
- def add_options(
- self, path: str, handler: _WebHandler, **kwargs: Any
- ) -> AbstractRoute:
+ def add_options(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""
Shortcut for add_route with method OPTIONS
"""
@@ -1132,7 +1129,7 @@ def add_options(
def add_get(
self,
path: str,
- handler: _WebHandler,
+ handler: Handler,
*,
name: Optional[str] = None,
allow_head: bool = True,
@@ -1147,29 +1144,25 @@ def add_get(
resource.add_route(hdrs.METH_HEAD, handler, **kwargs)
return resource.add_route(hdrs.METH_GET, handler, **kwargs)
- def add_post(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute:
+ def add_post(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""
Shortcut for add_route with method POST
"""
return self.add_route(hdrs.METH_POST, path, handler, **kwargs)
- def add_put(self, path: str, handler: _WebHandler, **kwargs: Any) -> AbstractRoute:
+ def add_put(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""
Shortcut for add_route with method PUT
"""
return self.add_route(hdrs.METH_PUT, path, handler, **kwargs)
- def add_patch(
- self, path: str, handler: _WebHandler, **kwargs: Any
- ) -> AbstractRoute:
+ def add_patch(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""
Shortcut for add_route with method PATCH
"""
return self.add_route(hdrs.METH_PATCH, path, handler, **kwargs)
- def add_delete(
- self, path: str, handler: _WebHandler, **kwargs: Any
- ) -> AbstractRoute:
+ def add_delete(self, path: str, handler: Handler, **kwargs: Any) -> AbstractRoute:
"""
Shortcut for add_route with method DELETE
"""
diff --git a/docs/abc.rst b/docs/abc.rst
index e131d794c51..e35bdd62c3d 100644
--- a/docs/abc.rst
+++ b/docs/abc.rst
@@ -161,7 +161,7 @@ Abstract Cookie Jar
.. versionadded:: 3.8
-Abstract Abstract Access Logger
+Abstract Access Logger
-------------------------------
.. class:: AbstractAccessLogger
diff --git a/docs/aiohttp-icon.svg b/docs/aiohttp-icon.svg
index 9e2009f2994..0b3ebacb0bf 100644
--- a/docs/aiohttp-icon.svg
+++ b/docs/aiohttp-icon.svg
@@ -1 +1 @@
-
\ No newline at end of file
+
diff --git a/docs/aiohttp-plain.svg b/docs/aiohttp-plain.svg
index 520727a369c..aec1b00c1e5 100644
--- a/docs/aiohttp-plain.svg
+++ b/docs/aiohttp-plain.svg
@@ -1 +1 @@
-
\ No newline at end of file
+
diff --git a/docs/built_with.rst b/docs/built_with.rst
index 3263f60d002..0a4c036adf5 100644
--- a/docs/built_with.rst
+++ b/docs/built_with.rst
@@ -27,3 +27,4 @@ project, pointing to ``_.
* `Mariner `_ Command-line torrent searcher.
* `DEEPaaS API `_ REST API for Machine learning, Deep learning and artificial intelligence applications.
* `BentoML `_ Machine Learning model serving framework
+* `salted `_ fast link check library (for HTML, Markdown, LaTeX, ...) with CLI
diff --git a/docs/changes.rst b/docs/changes.rst
index 0ecf1d76af8..6a61dfbcc1e 100644
--- a/docs/changes.rst
+++ b/docs/changes.rst
@@ -1,5 +1,17 @@
.. _aiohttp_changes:
+=========
+Changelog
+=========
+
+To be included in v\ |release| (if present)
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. towncrier-draft-entries:: |release| [UNRELEASED DRAFT]
+
+Released versions
+^^^^^^^^^^^^^^^^^
+
.. include:: ../CHANGES.rst
.. include:: ../HISTORY.rst
diff --git a/docs/client_advanced.rst b/docs/client_advanced.rst
index 40fd2fca728..316f41cd1f3 100644
--- a/docs/client_advanced.rst
+++ b/docs/client_advanced.rst
@@ -56,6 +56,17 @@ For *text/plain* ::
await session.post(url, data='Привет, Мир!')
+.. note::
+
+ The ``Authorization`` header will be removed if you get redirected
+ to a different host or protocol, except when an ``HTTP -> HTTPS``
+ redirect is performed on the same host.
+
+.. versionchanged:: 4.0
+
+ Started keeping the ``Authorization`` header during ``HTTP -> HTTPS``
+ redirects when the host remains the same.
+
Custom Cookies
--------------
@@ -522,9 +533,11 @@ DER with e.g::
Proxy support
-------------
-aiohttp supports plain HTTP proxies and HTTP proxies that can be upgraded to HTTPS
-via the HTTP CONNECT method. aiohttp does not support proxies that must be
-connected to via ``https://``. To connect, use the *proxy* parameter::
+aiohttp supports plain HTTP proxies and HTTP proxies that can be
+upgraded to HTTPS via the HTTP CONNECT method. aiohttp has limited
+support for proxies that must be connected to via ``https://``; see
+the info box below for more details.
+To connect, use the *proxy* parameter::
async with aiohttp.ClientSession() as session:
async with session.get("http://python.org",
@@ -559,6 +572,33 @@ variables* (all are case insensitive)::
Proxy credentials are given from ``~/.netrc`` file if present (see
:class:`aiohttp.ClientSession` for more details).
+.. attention::
+
+ CPython introduced support for TLS in TLS around Python 3.7.
+ But, as of now (Python 3.10), it's disabled for the transports that
+ :py:mod:`asyncio` uses. If a future release of Python (say v3.11)
+ toggles one attribute, it'll *just work™*.
+
+ aiohttp v3.8 and higher is ready for this to happen and has code in
+ place that supports TLS-in-TLS, and hence sending HTTPS requests over
+ HTTPS proxy tunnels.
+
+ ⚠️ For as long as your Python runtime doesn't declare support for
+ TLS-in-TLS, please don't file bugs with aiohttp but rather try to
+ help the CPython upstream enable this feature. Meanwhile, if you
+ *really* need this to work, there's a patch that may help you make
+ it happen; include it into your app's code base:
+ https://github.com/aio-libs/aiohttp/discussions/6044#discussioncomment-1432443.
+
+.. important::
+
+ When supplying a custom :py:class:`ssl.SSLContext` instance, bear in
+ mind that it will be used not only to establish a TLS session with
+ the HTTPS endpoint you're hitting but also to establish a TLS tunnel
+ to the HTTPS proxy. To avoid surprises, make sure to set up the trust
+ chain that would recognize TLS certificates used by both the endpoint
+ and the proxy.
+
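A minimal sketch of that setup (the proxy address and CA bundle path are hypothetical; it assumes the bundle can validate both the endpoint's and the proxy's certificates)::

    import ssl

    import aiohttp

    # Trust store that recognizes both the target endpoint and the proxy.
    ssl_ctx = ssl.create_default_context(cafile="/path/to/combined-ca-bundle.pem")

    async def fetch() -> None:
        async with aiohttp.ClientSession() as session:
            async with session.get(
                "https://example.com/",
                proxy="https://proxy.example.net:3128",
                ssl=ssl_ctx,
            ) as resp:
                print(resp.status)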
.. _aiohttp-persistent-session:
Persistent session
diff --git a/docs/client_reference.rst b/docs/client_reference.rst
index ed935a2da1a..86bad7f0c95 100644
--- a/docs/client_reference.rst
+++ b/docs/client_reference.rst
@@ -1374,10 +1374,10 @@ Response object
specified *encoding* parameter.
If *encoding* is ``None`` content encoding is autocalculated
- using ``Content-Type`` HTTP header and *chardet* tool if the
+ using ``Content-Type`` HTTP header and *charset-normalizer* tool if the
header is not provided by server.
- :term:`cchardet` is used with fallback to :term:`chardet` if
+ :term:`cchardet` is used with fallback to :term:`charset-normalizer` if
*cchardet* is not available.
Close underlying connection if data reading gets an error,
@@ -1389,14 +1389,14 @@ Response object
:return str: decoded *BODY*
- :raise LookupError: if the encoding detected by chardet or cchardet is
+ :raise LookupError: if the encoding detected by cchardet is
unknown by Python (e.g. VISCII).
.. note::
If response has no ``charset`` info in ``Content-Type`` HTTP
- header :term:`cchardet` / :term:`chardet` is used for content
- encoding autodetection.
+ header :term:`cchardet` / :term:`charset-normalizer` is used for
+ content encoding autodetection.
It may hurt performance. If page encoding is known passing
explicit *encoding* parameter might help::
@@ -1411,7 +1411,7 @@ Response object
a ``read`` call will be done,
If *encoding* is ``None`` content encoding is autocalculated
- using :term:`cchardet` or :term:`chardet` as fallback if
+ using :term:`cchardet` or :term:`charset-normalizer` as fallback if
*cchardet* is not available.
if response's `content-type` does not match `content_type` parameter
@@ -1449,11 +1449,11 @@ Response object
Automatically detect content encoding using ``charset`` info in
``Content-Type`` HTTP header. If this info is not exists or there
are no appropriate codecs for encoding then :term:`cchardet` /
- :term:`chardet` is used.
+ :term:`charset-normalizer` is used.
Beware that it is not always safe to use the result of this function to
decode a response. Some encodings detected by cchardet are not known by
- Python (e.g. VISCII).
+ Python (e.g. VISCII). *charset-normalizer* is not affected by that issue.
:raise RuntimeError: if called before the body has been read,
for :term:`cchardet` usage
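A hedged usage sketch of the detection described above (hypothetical URL; assumes the server sends no ``charset`` in ``Content-Type``)::

    import aiohttp

    async def fetch() -> None:
        async with aiohttp.ClientSession() as session:
            async with session.get("http://example.com/") as resp:
                text = await resp.text()        # charset autodetected when missing
                encoding = resp.get_encoding()  # valid once the body has been read
                print(encoding, len(text))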
diff --git a/docs/conf.py b/docs/conf.py
index b7096306d42..b7d80de93dc 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -14,6 +14,9 @@
import os
import re
+from pathlib import Path
+
+PROJECT_ROOT_DIR = Path(__file__).parents[1].resolve()
_docs_path = os.path.dirname(__file__)
_version_path = os.path.abspath(
@@ -43,10 +46,14 @@
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
- "sphinx.ext.viewcode",
+ # stdlib-party extensions:
+ "sphinx.ext.extlinks",
"sphinx.ext.intersphinx",
+ "sphinx.ext.viewcode",
+ # Third-party extensions:
"sphinxcontrib.asyncio",
"sphinxcontrib.blockdiag",
+ "sphinxcontrib.towncrier", # provides `towncrier-draft-entries` directive
]
@@ -81,9 +88,17 @@
# The master toctree document.
master_doc = "index"
-# General information about the project.
-project = "aiohttp"
-copyright = "2013-2020, aiohttp maintainers"
+# -- Project information -----------------------------------------------------
+
+github_url = "https://github.com"
+github_repo_org = "aio-libs"
+github_repo_name = "aiohttp"
+github_repo_slug = f"{github_repo_org}/{github_repo_name}"
+github_repo_url = f"{github_url}/{github_repo_slug}"
+github_sponsors_url = f"{github_url}/sponsors"
+
+project = github_repo_name
+copyright = f"2013-2020, {project} maintainers"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -136,6 +151,17 @@
# keep_warnings = False
+# -- Extension configuration -------------------------------------------------
+
+# -- Options for extlinks extension ---------------------------------------
+extlinks = {
+ "issue": (f"{github_repo_url}/issues/%s", "#"),
+ "pr": (f"{github_repo_url}/pull/%s", "PR #"),
+ "commit": (f"{github_repo_url}/commit/%s", ""),
+ "gh": (f"{github_url}/%s", "GitHub: "),
+ "user": (f"{github_sponsors_url}/%s", "@"),
+}
+
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
@@ -148,39 +174,39 @@
html_theme_options = {
"description": "Async HTTP client/server for asyncio and Python",
"canonical_url": "http://docs.aiohttp.org/en/stable/",
- "github_user": "aio-libs",
- "github_repo": "aiohttp",
+ "github_user": github_repo_org,
+ "github_repo": github_repo_name,
"github_button": True,
"github_type": "star",
"github_banner": True,
"badges": [
{
- "image": "https://github.com/aio-libs/aiohttp/workflows/CI/badge.svg",
- "target": "https://github.com/aio-libs/aiohttp/actions?query=workflow%3ACI",
+ "image": f"{github_repo_url}/workflows/CI/badge.svg",
+ "target": f"{github_repo_url}/actions?query=workflow%3ACI",
"height": "20",
"alt": "Azure Pipelines CI status",
},
{
- "image": "https://codecov.io/github/aio-libs/aiohttp/coverage.svg?branch=master",
- "target": "https://codecov.io/github/aio-libs/aiohttp",
+ "image": f"https://codecov.io/github/{github_repo_slug}/coverage.svg?branch=master",
+ "target": f"https://codecov.io/github/{github_repo_slug}",
"height": "20",
"alt": "Code coverage status",
},
{
- "image": "https://badge.fury.io/py/aiohttp.svg",
- "target": "https://badge.fury.io/py/aiohttp",
+ "image": f"https://badge.fury.io/py/{project}.svg",
+ "target": f"https://badge.fury.io/py/{project}",
"height": "20",
"alt": "Latest PyPI package version",
},
{
- "image": "https://img.shields.io/discourse/status?server=https%3A%2F%2Faio-libs.discourse.group",
- "target": "https://aio-libs.discourse.group",
+ "image": f"https://img.shields.io/discourse/status?server=https%3A%2F%2F{github_repo_org}.discourse.group",
+ "target": f"https://{github_repo_org}.discourse.group",
"height": "20",
"alt": "Discourse status",
},
{
"image": "https://badges.gitter.im/Join%20Chat.svg",
- "target": "https://gitter.im/aio-libs/Lobby",
+ "target": f"https://gitter.im/{github_repo_org}/Lobby",
"height": "20",
"alt": "Chat on Gitter",
},
@@ -268,7 +294,7 @@
# html_file_suffix = None
# Output file base name for HTML help builder.
-htmlhelp_basename = "aiohttpdoc"
+htmlhelp_basename = f"{project}doc"
# -- Options for LaTeX output ---------------------------------------------
@@ -286,7 +312,13 @@
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
- ("index", "aiohttp.tex", "aiohttp Documentation", "aiohttp contributors", "manual"),
+ (
+ "index",
+ f"{project}.tex",
+ f"{project} Documentation",
+ f"{project} contributors",
+ "manual",
+ ),
]
# The name of an image file (relative to this directory) to place at the top of
@@ -314,7 +346,7 @@
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
-man_pages = [("index", "aiohttp", "aiohttp Documentation", ["aiohttp"], 1)]
+man_pages = [("index", project, f"{project} Documentation", [project], 1)]
# If true, show URL addresses after external links.
# man_show_urls = False
@@ -328,10 +360,10 @@
texinfo_documents = [
(
"index",
- "aiohttp",
- "aiohttp Documentation",
+ project,
+ f"{project} Documentation",
"Aiohttp contributors",
- "aiohttp",
+ project,
"One line description of project.",
"Miscellaneous",
),
@@ -392,3 +424,10 @@
("py:meth", "aiohttp.web.UrlDispatcher.register_resource"), # undocumented
("py:func", "aiohttp_debugtoolbar.setup"), # undocumented
]
+
+# -- Options for towncrier_draft extension -----------------------------------
+
+towncrier_draft_autoversion_mode = "draft" # or: 'sphinx-version', 'sphinx-release'
+towncrier_draft_include_empty = True
+towncrier_draft_working_directory = PROJECT_ROOT_DIR
+# Not yet supported: towncrier_draft_config_path = 'pyproject.toml' # relative to cwd
diff --git a/docs/deployment.rst b/docs/deployment.rst
index e542a3409e2..60f218e848d 100644
--- a/docs/deployment.rst
+++ b/docs/deployment.rst
@@ -37,7 +37,7 @@ Nginx+supervisord
Running aiohttp servers behind :term:`nginx` makes several advantages.
-At first, nginx is the perfect frontend server. It may prevent many
+First, nginx is the perfect frontend server. It may prevent many
attacks based on malformed http protocol etc.
Second, running several aiohttp instances behind nginx allows to
@@ -51,10 +51,10 @@ But this way requires more complex configuration.
Nginx configuration
--------------------
-Here is short extraction about writing Nginx configuration file.
+Here is a short example of an Nginx configuration file.
It does not cover all available Nginx options.
-For full reference read `Nginx tutorial
+For full details, read `Nginx tutorial
`_ and `official Nginx
documentation
`_.
@@ -86,8 +86,8 @@ First configure HTTP server itself:
}
}
-This config listens on port ``80`` for server named ``example.com``
-and redirects everything to ``aiohttp`` backend group.
+This config listens on port ``80`` for a server named ``example.com``
+and redirects everything to the ``aiohttp`` backend group.
Also it serves static files from ``/path/to/app/static`` path as
``example.com/static``.
@@ -124,20 +124,20 @@ selection.
.. note::
- Nginx is not the only existing *reverse proxy server* but the most
+ Nginx is not the only existing *reverse proxy server*, but it's the most
popular one. Alternatives like HAProxy may be used as well.
Supervisord
-----------
-After configuring Nginx we need to start our aiohttp backends. Better
-to use some tool for starting them automatically after system reboot
+After configuring Nginx we need to start our aiohttp backends. It's best
+to use some tool for starting them automatically after a system reboot
or backend crash.
-There are very many ways to do it: Supervisord, Upstart, Systemd,
+There are many ways to do it: Supervisord, Upstart, Systemd,
Gaffer, Circus, Runit etc.
-Here we'll use `Supervisord `_ for example:
+Here we'll use `Supervisord `_ as an example:
.. code-block:: cfg
@@ -159,7 +159,7 @@ Here we'll use `Supervisord `_ for example:
aiohttp server
--------------
-The last step is preparing aiohttp server for working with supervisord.
+The last step is preparing the aiohttp server to work with supervisord.
Assuming we have properly configured :class:`aiohttp.web.Application`
and port is specified by command line, the task is trivial:
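A minimal sketch of such an entry point (argument handling simplified; the full example in the docs may differ)::

    import sys

    from aiohttp import web

    async def handler(request: web.Request) -> web.Response:
        return web.Response(text="OK")

    app = web.Application()
    app.router.add_get("/", handler)

    # The port is passed on the command line by supervisord,
    # e.g. `python server.py 8080`.
    web.run_app(app, port=int(sys.argv[1]))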
@@ -196,17 +196,17 @@ aiohttp can be deployed using `Gunicorn
pre-fork worker model. Gunicorn launches your app as worker processes
for handling incoming requests.
-In opposite to deployment with :ref:`bare Nginx
-` the solution does not need to
-manually run several aiohttp processes and use tool like supervisord
-for monitoring it. But nothing is for free: running aiohttp
+As opposed to deployment with :ref:`bare Nginx
+`, this solution does not require
+manually running several aiohttp processes or using a tool like supervisord
+to monitor them. But nothing is free: running an aiohttp
application under gunicorn is slightly slower.
Prepare environment
-------------------
-You firstly need to setup your deployment environment. This example is
+You first need to set up your deployment environment. This example is
based on `Ubuntu `_ 16.04.
Create a directory for your application::
@@ -214,7 +214,7 @@ Create a directory for your application::
>> mkdir myapp
>> cd myapp
-Create Python virtual environment::
+Create a Python virtual environment::
>> python3 -m venv venv
>> source venv/bin/activate
diff --git a/docs/glossary.rst b/docs/glossary.rst
index c2da11817af..1de13dc7d04 100644
--- a/docs/glossary.rst
+++ b/docs/glossary.rst
@@ -45,11 +45,12 @@
Any object that can be called. Use :func:`callable` to check
that.
- chardet
+ charset-normalizer
- The Universal Character Encoding Detector
+ The Real First Universal Charset Detector.
+ Open, modern and actively maintained alternative to Chardet.
- https://pypi.python.org/pypi/chardet/
+ https://pypi.org/project/charset-normalizer/
cchardet
diff --git a/docs/index.rst b/docs/index.rst
index 78663bde26b..6be4898e029 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -34,7 +34,7 @@ Library Installation
$ pip install aiohttp
You may want to install *optional* :term:`cchardet` library as faster
-replacement for :term:`chardet`:
+replacement for :term:`charset-normalizer`:
.. code-block:: bash
@@ -51,7 +51,7 @@ This option is highly recommended:
Installing speedups altogether
------------------------------
-The following will get you ``aiohttp`` along with :term:`chardet`,
+The following will get you ``aiohttp`` along with :term:`charset-normalizer`,
:term:`aiodns` and ``Brotli`` in one bundle. No need to type
separate commands anymore!
@@ -148,12 +148,11 @@ Dependencies
- Python 3.7+
- *async_timeout*
-- *attrs*
-- *chardet*
+- *charset-normalizer*
- *multidict*
- *yarl*
- *Optional* :term:`cchardet` as faster replacement for
- :term:`chardet`.
+ :term:`charset-normalizer`.
Install it explicitly via:
diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt
index 4a18a93b0b5..c4182c2f06d 100644
--- a/docs/spelling_wordlist.txt
+++ b/docs/spelling_wordlist.txt
@@ -82,6 +82,7 @@ WSMsgType
Websockets
Workflow
abc
+addons
aiodns
aioes
aiohttp
@@ -117,11 +118,13 @@ brotli
bugfix
builtin
cChardet
+callables
cancelled
canonicalization
canonicalize
cchardet
ceil
+Chardet
charset
charsetdetect
chunked
@@ -204,6 +207,7 @@ login
lookup
lookups
lossless
+lowercased
manylinux
metadata
microservice
@@ -225,6 +229,7 @@ namespace
netrc
nginx
noop
+normalizer
nowait
optimizations
os
@@ -261,6 +266,7 @@ redirections
refactor
refactored
refactoring
+referenceable
regex
regexps
regexs
@@ -319,6 +325,7 @@ unittest
unix
unsets
unstripped
+uppercased
upstr
url
urldispatcher
diff --git a/docs/third_party.rst b/docs/third_party.rst
index bcf194e5ef1..c6115130d79 100644
--- a/docs/third_party.rst
+++ b/docs/third_party.rst
@@ -4,26 +4,26 @@ Third-Party libraries
=====================
-aiohttp is not the library for making HTTP requests and creating WEB
-server only.
+aiohttp is not just a library for making HTTP requests and creating web
+servers.
-It is the grand basement for libraries built *on top* of aiohttp.
+It is the foundation for libraries built *on top* of aiohttp.
This page is a list of these tools.
-Please feel free to add your open sourced library if it's not enlisted
-yet by making Pull Request to https://github.com/aio-libs/aiohttp/
+Please feel free to add your open source library if it's not listed
+yet by making a pull request to https://github.com/aio-libs/aiohttp/
-* Why do you might want to include your awesome library into the list?
+* Why would you want to include your awesome library in this list?
-* Just because the list increases your library visibility. People
+* Because the list increases your library's visibility. People
will have an easy way to find it.
Officially supported
--------------------
-This list contains libraries which are supported by *aio-libs* team
+This list contains libraries which are supported by the *aio-libs* team
and located on https://github.com/aio-libs
@@ -31,7 +31,7 @@ aiohttp extensions
^^^^^^^^^^^^^^^^^^
- `aiohttp-session `_
- provides sessions for :mod:`aiohttp.web`.
+ provides sessions for :mod:`aiohttp.web`.
- `aiohttp-debugtoolbar `_
is a library for *debug toolbar* support for :mod:`aiohttp.web`.
@@ -65,7 +65,7 @@ Database drivers
- `aiopg `_ PostgreSQL async driver.
-- `aiomysql `_ MySql async driver.
+- `aiomysql `_ MySQL async driver.
- `aioredis `_ Redis async driver.
@@ -82,21 +82,21 @@ Other tools
Approved third-party libraries
------------------------------
-The libraries are not part of ``aio-libs`` but they are proven to be very
+These libraries are not part of ``aio-libs`` but they have proven to be very
well written and highly recommended for usage.
- `uvloop `_ Ultra fast
implementation of asyncio event loop on top of ``libuv``.
- We are highly recommending to use it instead of standard ``asyncio``.
+ We highly recommend using this instead of the standard ``asyncio`` loop.
Database drivers
^^^^^^^^^^^^^^^^
- `asyncpg `_ Another
- PostgreSQL async driver. It's much faster than ``aiopg`` but it is
- not drop-in replacement -- the API is different. Anyway please take
- a look on it -- the driver is really incredible fast.
+ PostgreSQL async driver. It's much faster than ``aiopg`` but is
+ not a drop-in replacement -- the API is different. Still, please take
+ a look at it -- the driver is incredibly fast.
OpenAPI / Swagger extensions
----------------------------
@@ -127,7 +127,7 @@ support to aiohttp web servers.
- `aio-openapi `_
Asynchronous web middleware for aiohttp and serving Rest APIs with OpenAPI v3
- specification and with optional PostgreSql database bindings.
+ specification and with optional PostgreSQL database bindings.
- `rororo `_
Implement ``aiohttp.web`` OpenAPI 3 server applications with schema first
@@ -136,12 +136,12 @@ support to aiohttp web servers.
Others
------
-The list of libraries which are exists but not enlisted in former categories.
+Here is a list of other known libraries that do not belong in the former categories.
-They may be perfect or not -- we don't know.
+We cannot vouch for the quality of these libraries; use them at your own risk.
Please add your library reference here first and after some time
-period ask to raise the status.
+ask to raise the status.
- `pytest-aiohttp-client `_
Pytest fixture with simpler api, payload decoding and status code assertions.
@@ -237,9 +237,6 @@ period ask to raise the status.
- `aiogram `_
A fully asynchronous library for Telegram Bot API written with asyncio and aiohttp.
-- `vk.py `_
- Extremely-fast Python 3.6+ toolkit for create applications work`s with VKAPI.
-
- `aiohttp-graphql `_
GraphQL and GraphIQL interface for aiohttp.
@@ -290,3 +287,6 @@ period ask to raise the status.
- `aiohttp-retry `_
Wrapper for aiohttp client for retrying requests. Python 3.6+ required.
+
+- `aiohttp-socks `_
+ SOCKS proxy connector for aiohttp.
diff --git a/docs/web_reference.rst b/docs/web_reference.rst
index 549e199507c..9b50a1c1412 100644
--- a/docs/web_reference.rst
+++ b/docs/web_reference.rst
@@ -745,7 +745,7 @@ StreamResponse
:param int version: a decimal integer, identifies to which
version of the state management
specification the cookie
- conforms. (Optional, *version=1* by default)
+ conforms. (optional)
:param str samesite: Asserts that a cookie must not be sent with
cross-origin requests, providing some protection
diff --git a/examples/legacy/tcp_protocol_parser.py b/examples/legacy/tcp_protocol_parser.py
index ca49db7d8f9..1ef972758e5 100755
--- a/examples/legacy/tcp_protocol_parser.py
+++ b/examples/legacy/tcp_protocol_parser.py
@@ -60,7 +60,7 @@ def stop(self):
self.transport.write(b"stop:\r\n")
def send_text(self, text):
- self.transport.write(f"text:{text.strip()}\r\n".encode("utf-8"))
+ self.transport.write(f"text:{text.strip()}\r\n".encode())
class EchoServer(asyncio.Protocol):
diff --git a/examples/web_rewrite_headers_middleware.py b/examples/web_rewrite_headers_middleware.py
index 7fc569bce7a..149dc28285d 100755
--- a/examples/web_rewrite_headers_middleware.py
+++ b/examples/web_rewrite_headers_middleware.py
@@ -2,18 +2,15 @@
"""
Example for rewriting response headers by middleware.
"""
-from typing import Awaitable, Callable
-
from aiohttp import web
-
-_WebHandler = Callable[[web.Request], Awaitable[web.StreamResponse]]
+from aiohttp.typedefs import Handler
async def handler(request: web.Request) -> web.StreamResponse:
return web.Response(text="Everything is fine")
-async def middleware(request: web.Request, handler: _WebHandler) -> web.StreamResponse:
+async def middleware(request: web.Request, handler: Handler) -> web.StreamResponse:
try:
response = await handler(request)
except web.HTTPException as exc:
diff --git a/requirements/base.txt b/requirements/base.txt
index 68e007c65b2..4c995d352d6 100644
--- a/requirements/base.txt
+++ b/requirements/base.txt
@@ -1,14 +1,14 @@
-r multidict.txt
# required c-ares will not build on windows and has build problems on Macos Python<3.7
aiodns==3.0.0; sys_platform=="linux" or sys_platform=="darwin" and python_version>="3.7"
-aiosignal==1.1.2
+aiosignal==1.2.0
async-timeout==4.0.0a3
asynctest==0.13.0; python_version<"3.8"
Brotli==1.0.9
cchardet==2.1.7
-chardet==4.0.0
-frozenlist==1.1.1
+charset-normalizer==2.0.4
+frozenlist==1.2.0
gunicorn==20.1.0
typing_extensions==3.7.4.3
uvloop==0.14.0; platform_system!="Windows" and implementation_name=="cpython" and python_version<"3.9" # MagicStack/uvloop#14
-yarl==1.6.3
+yarl==1.7.0
diff --git a/requirements/cython.in b/requirements/cython.in
index 181b8845865..af82ec193ce 100644
--- a/requirements/cython.in
+++ b/requirements/cython.in
@@ -1,3 +1,3 @@
-r multidict.txt
-cython==0.29.23
+cython==0.29.24
typing_extensions==3.7.4.3 # required for parsing aiohttp/hdrs.py by tools/gen.py
diff --git a/requirements/cython.txt b/requirements/cython.txt
index 88fae7891ab..8d63ed4b4f9 100644
--- a/requirements/cython.txt
+++ b/requirements/cython.txt
@@ -4,9 +4,9 @@
#
# pip-compile --allow-unsafe requirements/cython.in
#
-cython==0.29.23
+cython==0.29.24
# via -r requirements/cython.in
-multidict==5.1.0
+multidict==5.2.0
# via -r requirements/multidict.txt
typing_extensions==3.7.4.3
# via -r requirements/cython.in
diff --git a/requirements/dev.in b/requirements/dev.in
index 31b14be9997..1da2b9d5bc0 100644
--- a/requirements/dev.in
+++ b/requirements/dev.in
@@ -2,3 +2,5 @@
-r test.txt
-r doc.txt
cherry_picker==2.0.0; python_version>="3.6"
+python-on-whales==0.28.0
+wait-for-it==2.2.1
diff --git a/requirements/dev.txt b/requirements/dev.txt
index 8c7a85d20d9..df6a12a9ad4 100644
--- a/requirements/dev.txt
+++ b/requirements/dev.txt
@@ -8,7 +8,7 @@ aiodns==3.0.0 ; sys_platform == "linux" or sys_platform == "darwin" and python_v
# via -r requirements/base.txt
aiohttp-theme==0.1.6
# via -r requirements/doc.txt
-aiosignal==1.1.2
+aiosignal==1.2.0
# via -r requirements/base.txt
alabaster==0.7.12
# via sphinx
@@ -26,7 +26,7 @@ attrs==20.3.0
# pytest
babel==2.9.0
# via sphinx
-black==21.6b0 ; implementation_name == "cpython"
+black==21.7b0 ; implementation_name == "cpython"
# via -r requirements/lint.txt
blockdiag==2.0.1
# via sphinxcontrib-blockdiag
@@ -46,14 +46,12 @@ cfgv==3.2.0
# via
# -r requirements/lint.txt
# pre-commit
-chardet==4.0.0
+charset-normalizer==2.0.4
# via
# -r requirements/base.txt
# requests
cherry_picker==2.0.0 ; python_version >= "3.6"
# via -r requirements/dev.in
-click-default-group==1.2.2
- # via towncrier
click==7.1.2
# via
# -r requirements/lint.txt
@@ -61,7 +59,11 @@ click==7.1.2
# cherry-picker
# click-default-group
# towncrier
-coverage==5.5
+ # typer
+ # wait-for-it
+click-default-group==1.2.2
+ # via towncrier
+coverage[toml]==6.0.2
# via
# -r requirements/test.txt
# pytest-cov
@@ -79,20 +81,22 @@ filelock==3.0.12
# via
# -r requirements/lint.txt
# virtualenv
-flake8-pyi==20.10.0
- # via -r requirements/lint.txt
-flake8==3.9.2
+flake8==4.0.1
# via
# -r requirements/lint.txt
# flake8-pyi
+flake8-pyi==20.10.0
+ # via -r requirements/lint.txt
freezegun==1.1.0
# via -r requirements/test.txt
-frozenlist==1.1.1
+frozenlist==1.2.0
# via
# -r requirements/base.txt
# aiosignal
-funcparserlib==0.3.6
- # via blockdiag
+funcparserlib==1.0.0a0
+ # via
+ # -r requirements/doc.txt
+ # blockdiag
gidgethub==5.0.0
# via cherry-picker
gunicorn==20.1.0
@@ -114,7 +118,7 @@ iniconfig==1.1.1
# via
# -r requirements/lint.txt
# pytest
-isort==5.9.1
+isort==5.9.3
# via -r requirements/lint.txt
jinja2==2.11.3
# via
@@ -126,20 +130,20 @@ mccabe==0.6.1
# via
# -r requirements/lint.txt
# flake8
-multidict==5.1.0
+multidict==5.2.0
# via
# -r requirements/multidict.txt
# yarl
-mypy-extensions==0.4.3 ; implementation_name == "cpython"
+mypy==0.910 ; implementation_name == "cpython"
# via
# -r requirements/lint.txt
# -r requirements/test.txt
- # black
- # mypy
-mypy==0.790 ; implementation_name == "cpython"
+mypy-extensions==0.4.3 ; implementation_name == "cpython"
# via
# -r requirements/lint.txt
# -r requirements/test.txt
+ # black
+ # mypy
nodeenv==1.5.0
# via
# -r requirements/lint.txt
@@ -153,32 +157,36 @@ pathspec==0.8.1
# via
# -r requirements/lint.txt
# black
-pillow==8.2.0
+pillow==8.3.2
# via blockdiag
pluggy==0.13.1
# via
# -r requirements/lint.txt
# pytest
-pre-commit==2.13.0
+pre-commit==2.15.0
# via -r requirements/lint.txt
+proxy.py==2.3.1
+ # via -r requirements/test.txt
py==1.10.0
# via
# -r requirements/lint.txt
# pytest
pycares==4.0.0
# via aiodns
-pycodestyle==2.7.0
+pycodestyle==2.8.0
# via
# -r requirements/lint.txt
# flake8
pycparser==2.20
# via cffi
-pyflakes==2.3.0
+pydantic==1.8.2
+ # via python-on-whales
+pyflakes==2.4.0
# via
# -r requirements/lint.txt
# flake8
# flake8-pyi
-pygments==2.9.0
+pygments==2.10.0
# via
# -r requirements/doc.txt
# sphinx
@@ -188,18 +196,20 @@ pyparsing==2.4.7
# via
# -r requirements/lint.txt
# packaging
-pytest-cov==2.12.1
- # via -r requirements/test.txt
-pytest-mock==3.6.1
- # via -r requirements/test.txt
-pytest==6.2.2
+pytest==6.2.5
# via
# -r requirements/lint.txt
# -r requirements/test.txt
# pytest-cov
# pytest-mock
+pytest-cov==3.0.0
+ # via -r requirements/test.txt
+pytest-mock==3.6.1
+ # via -r requirements/test.txt
python-dateutil==2.8.1
# via freezegun
+python-on-whales==0.28.0
+ # via -r requirements/dev.in
pytz==2020.5
# via babel
pyyaml==5.4.1
@@ -216,10 +226,11 @@ regex==2020.11.13
requests==2.25.1
# via
# cherry-picker
+ # python-on-whales
# sphinx
setuptools-git==1.2
# via -r requirements/test.txt
-six==1.15.0
+six==1.16.0
# via
# -r requirements/lint.txt
# cryptography
@@ -227,11 +238,12 @@ six==1.15.0
# virtualenv
snowballstemmer==2.0.0
# via sphinx
-sphinx==4.0.2
+sphinx==4.2.0
# via
# -r requirements/doc.txt
# sphinxcontrib-asyncio
# sphinxcontrib-blockdiag
+ # sphinxcontrib-towncrier
sphinxcontrib-applehelp==1.0.2
# via sphinx
sphinxcontrib-asyncio==0.3.0
@@ -240,37 +252,51 @@ sphinxcontrib-blockdiag==2.0.0
# via -r requirements/doc.txt
sphinxcontrib-devhelp==1.0.2
# via sphinx
-sphinxcontrib-htmlhelp==1.0.3
+sphinxcontrib-htmlhelp==2.0.0
# via sphinx
sphinxcontrib-jsmath==1.0.1
# via sphinx
sphinxcontrib-qthelp==1.0.3
# via sphinx
-sphinxcontrib-serializinghtml==1.1.4
+sphinxcontrib-serializinghtml==1.1.5
# via sphinx
+sphinxcontrib-towncrier==0.2.0a0
+ # via -r requirements/doc.txt
toml==0.10.2
# via
# -r requirements/lint.txt
- # black
# cherry-picker
+ # mypy
# pre-commit
# pytest
- # pytest-cov
# towncrier
+tomli==1.2.1
+ # via
+ # -r requirements/lint.txt
+ # black
+ # coverage
towncrier==21.3.0
- # via -r requirements/doc.txt
-trustme==0.8.0 ; platform_machine != "i686"
+ # via
+ # -r requirements/doc.txt
+ # sphinxcontrib-towncrier
+tqdm==4.62.2
+ # via python-on-whales
+trustme==0.9.0 ; platform_machine != "i686"
# via -r requirements/test.txt
-typed-ast==1.4.2
+typer==0.4.0
+ # via python-on-whales
+types-chardet==0.1.3
# via
# -r requirements/lint.txt
- # mypy
+ # -r requirements/test.txt
typing-extensions==3.7.4.3
# via
# -r requirements/base.txt
# -r requirements/lint.txt
# async-timeout
# mypy
+ # proxy.py
+ # pydantic
uritemplate==3.0.1
# via gidgethub
urllib3==1.26.5
@@ -279,13 +305,15 @@ virtualenv==20.4.2
# via
# -r requirements/lint.txt
# pre-commit
+wait-for-it==2.2.1
+ # via -r requirements/dev.in
webcolors==1.11.1
# via blockdiag
-yarl==1.6.3
+yarl==1.7.0
# via -r requirements/base.txt
# The following packages are considered to be unsafe in a requirements file:
-setuptools==51.3.1
+setuptools==57.4.0
# via
# blockdiag
# gunicorn
diff --git a/requirements/doc-spelling.txt b/requirements/doc-spelling.txt
index 4ed9469e76e..a9800eef0e6 100644
--- a/requirements/doc-spelling.txt
+++ b/requirements/doc-spelling.txt
@@ -5,7 +5,7 @@
# pip-compile --allow-unsafe requirements/doc-spelling.in
#
aiohttp-theme==0.1.6
- # via -r doc.txt
+ # via -r requirements/doc.txt
alabaster==0.7.12
# via sphinx
babel==2.9.0
@@ -16,22 +16,22 @@ certifi==2020.12.5
# via requests
chardet==4.0.0
# via requests
-click-default-group==1.2.2
- # via towncrier
click==7.1.2
# via
# click-default-group
# towncrier
+click-default-group==1.2.2
+ # via towncrier
docutils==0.16
# via sphinx
-funcparserlib==0.3.6
- # via blockdiag
+funcparserlib==1.0.0a0
+ # via
+ # -r requirements/doc.txt
+ # blockdiag
idna==2.10
# via requests
imagesize==1.2.0
# via sphinx
-importlib-metadata==4.5.0
- # via sphinxcontrib-spelling
incremental==17.5.0
# via towncrier
jinja2==2.11.3
@@ -42,13 +42,13 @@ markupsafe==1.1.1
# via jinja2
packaging==20.9
# via sphinx
-pillow==8.2.0
+pillow==8.3.2
# via blockdiag
pyenchant==3.2.0
# via sphinxcontrib-spelling
-pygments==2.9.0
+pygments==2.10.0
# via
- # -r doc.txt
+ # -r requirements/doc.txt
# sphinx
pyparsing==2.4.7
# via packaging
@@ -58,42 +58,43 @@ requests==2.25.1
# via sphinx
snowballstemmer==2.1.0
# via sphinx
-sphinx==4.0.2
+sphinx==4.2.0
# via
- # -r doc.txt
+ # -r requirements/doc.txt
# sphinxcontrib-asyncio
# sphinxcontrib-blockdiag
# sphinxcontrib-spelling
+ # sphinxcontrib-towncrier
sphinxcontrib-applehelp==1.0.2
# via sphinx
sphinxcontrib-asyncio==0.3.0
- # via -r doc.txt
+ # via -r requirements/doc.txt
sphinxcontrib-blockdiag==2.0.0
- # via -r doc.txt
+ # via -r requirements/doc.txt
sphinxcontrib-devhelp==1.0.2
# via sphinx
-sphinxcontrib-htmlhelp==1.0.3
+sphinxcontrib-htmlhelp==2.0.0
# via sphinx
sphinxcontrib-jsmath==1.0.1
# via sphinx
sphinxcontrib-qthelp==1.0.3
# via sphinx
-sphinxcontrib-serializinghtml==1.1.4
+sphinxcontrib-serializinghtml==1.1.5
# via sphinx
sphinxcontrib-spelling==7.2.1 ; platform_system != "Windows"
- # via -r doc-spelling.in
+ # via -r requirements/doc-spelling.in
+sphinxcontrib-towncrier==0.2.0a0
+ # via -r requirements/doc.txt
toml==0.10.2
# via towncrier
towncrier==21.3.0
- # via -r doc.txt
-typing-extensions==3.10.0.0
- # via importlib-metadata
+ # via
+ # -r requirements/doc.txt
+ # sphinxcontrib-towncrier
urllib3==1.26.5
# via requests
webcolors==1.11.1
# via blockdiag
-zipp==3.4.1
- # via importlib-metadata
# The following packages are considered to be unsafe in a requirements file:
setuptools==53.0.0
diff --git a/requirements/doc.txt b/requirements/doc.txt
index ceb61f00ccd..29b493c8b37 100644
--- a/requirements/doc.txt
+++ b/requirements/doc.txt
@@ -1,6 +1,9 @@
aiohttp-theme==0.1.6
-pygments==2.9.0
-sphinx==4.0.2
+# Temp fix till updated: https://github.com/blockdiag/blockdiag/pull/148
+funcparserlib==1.0.0a0
+pygments==2.10.0
+sphinx==4.2.0
sphinxcontrib-asyncio==0.3.0
sphinxcontrib-blockdiag==2.0.0
+sphinxcontrib-towncrier==0.2.0a0
towncrier==21.3.0
diff --git a/requirements/lint.in b/requirements/lint.in
index f247106e3e8..daea0fba678 100644
--- a/requirements/lint.in
+++ b/requirements/lint.in
@@ -1,7 +1,8 @@
-black==21.6b0; implementation_name=="cpython"
-flake8==3.9.2
+black==21.7b0; implementation_name=="cpython"
+flake8==4.0.1
flake8-pyi==20.10.0
-isort==5.9.1
-mypy==0.790; implementation_name=="cpython"
-pre-commit==2.13.0
-pytest==6.2.2
+isort==5.9.3
+mypy==0.910; implementation_name=="cpython"
+pre-commit==2.15.0
+pytest==6.2.5
+types-chardet==0.1.3
diff --git a/requirements/lint.txt b/requirements/lint.txt
index 01671b28b00..38c1c2c3492 100644
--- a/requirements/lint.txt
+++ b/requirements/lint.txt
@@ -12,7 +12,7 @@ attrs==20.3.0
# via
# flake8-pyi
# pytest
-black==21.6b0 ; implementation_name == "cpython"
+black==21.7b0 ; implementation_name == "cpython"
# via -r requirements/lint.in
cfgv==3.2.0
# via pre-commit
@@ -22,26 +22,26 @@ distlib==0.3.1
# via virtualenv
filelock==3.0.12
# via virtualenv
-flake8-pyi==20.10.0
- # via -r requirements/lint.in
-flake8==3.9.2
+flake8==4.0.1
# via
# -r requirements/lint.in
# flake8-pyi
+flake8-pyi==20.10.0
+ # via -r requirements/lint.in
identify==1.5.14
# via pre-commit
iniconfig==1.1.1
# via pytest
-isort==5.9.1
+isort==5.9.3
# via -r requirements/lint.in
mccabe==0.6.1
# via flake8
+mypy==0.910 ; implementation_name == "cpython"
+ # via -r requirements/lint.in
mypy-extensions==0.4.3
# via
# black
# mypy
-mypy==0.790 ; implementation_name == "cpython"
- # via -r requirements/lint.in
nodeenv==1.5.0
# via pre-commit
packaging==20.9
@@ -50,33 +50,35 @@ pathspec==0.8.1
# via black
pluggy==0.13.1
# via pytest
-pre-commit==2.13.0
+pre-commit==2.15.0
# via -r requirements/lint.in
py==1.10.0
# via pytest
-pycodestyle==2.7.0
+pycodestyle==2.8.0
# via flake8
-pyflakes==2.3.0
+pyflakes==2.4.0
# via
# flake8
# flake8-pyi
pyparsing==2.4.7
# via packaging
-pytest==6.2.2
+pytest==6.2.5
# via -r requirements/lint.in
pyyaml==5.4.1
# via pre-commit
regex==2020.11.13
# via black
-six==1.15.0
+six==1.16.0
# via virtualenv
toml==0.10.2
# via
- # black
+ # mypy
# pre-commit
# pytest
-typed-ast==1.4.2
- # via mypy
+tomli==1.2.1
+ # via black
+types-chardet==0.1.3
+ # via -r requirements/lint.in
typing-extensions==3.7.4.3
# via mypy
virtualenv==20.4.2
diff --git a/requirements/multidict.txt b/requirements/multidict.txt
index 7357d4643f0..d3e1b42f470 100644
--- a/requirements/multidict.txt
+++ b/requirements/multidict.txt
@@ -1 +1 @@
-multidict==5.1.0
+multidict==5.2.0
diff --git a/requirements/test.txt b/requirements/test.txt
index 36707a4f45f..ccdb1b8ff68 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -1,14 +1,16 @@
-r base.txt
Brotli==1.0.9
-coverage==5.5
+coverage==6.0.2
cryptography==3.3.1; platform_machine!="i686" and python_version<"3.9" # no 32-bit wheels; no python 3.9 wheels yet
freezegun==1.1.0
-mypy==0.790; implementation_name=="cpython"
+mypy==0.910; implementation_name=="cpython"
mypy-extensions==0.4.3; implementation_name=="cpython"
-pytest==6.2.2
-pytest-cov==2.12.1
+proxy.py==2.3.1
+pytest==6.2.5
+pytest-cov==3.0.0
pytest-mock==3.6.1
re-assert==1.1.0
setuptools-git==1.2
-trustme==0.8.0; platform_machine!="i686" # no 32-bit wheels
+trustme==0.9.0; platform_machine!="i686" # no 32-bit wheels
+types-chardet==0.1.3
diff --git a/setup.cfg b/setup.cfg
index 7f97f1cce61..5f5dc0bd3bc 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -50,6 +50,10 @@ addopts =
# `pytest-cov`:
--cov=aiohttp
+ --cov=tests/
+
+ # run tests that are not marked with dev_mode
+ -m "not dev_mode"
filterwarnings =
error
ignore:module 'ssl' has no attribute 'OP_NO_COMPRESSION'. The Python interpreter is compiled against OpenSSL < 1.0.0. Ref. https.//docs.python.org/3/library/ssl.html#ssl.OP_NO_COMPRESSION:UserWarning
@@ -58,73 +62,12 @@ filterwarnings =
ignore:Exception ignored in. :pytest.PytestUnraisableExceptionWarning:_pytest.unraisableexception
+ ignore:The loop argument is deprecated:DeprecationWarning:asyncio
junit_suite_name = aiohttp_test_suite
norecursedirs = dist docs build .tox .eggs
minversion = 3.8.2
testpaths = tests/
junit_family=xunit2
xfail_strict = true
-
-[mypy]
-follow_imports = silent
-strict_optional = True
-warn_redundant_casts = True
-warn_unused_ignores = True
-
-# uncomment next lines
-# to enable strict mypy mode
-#
-check_untyped_defs = True
-disallow_any_generics = True
-disallow_untyped_defs = True
-
-
-[mypy-pytest]
-ignore_missing_imports = true
-
-
-[mypy-uvloop]
-ignore_missing_imports = true
-
-
-[mypy-tokio]
-ignore_missing_imports = true
-
-
-[mypy-aiodns]
-ignore_missing_imports = true
-
-
-[mypy-gunicorn.config]
-ignore_missing_imports = true
-
-[mypy-gunicorn.workers]
-ignore_missing_imports = true
-
-
-[mypy-brotli]
-ignore_missing_imports = true
-
-
-[mypy-chardet]
-ignore_missing_imports = true
-
-
-[mypy-cchardet]
-ignore_missing_imports = true
-
-
-[mypy-idna_ssl]
-ignore_missing_imports = true
-
-
-[mypy-asynctest]
-ignore_missing_imports = true
-
-
-[mypy-re_assert]
-ignore_missing_imports = true
-
-
-[mypy-trustme]
-ignore_missing_imports = true
+markers =
+ dev_mode: mark test to run in dev mode.
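To illustrate how this marker interacts with the ``-m "not dev_mode"`` default above (the test name is hypothetical), a dev-mode-only test would be declared like this and excluded from the default run::

    import pytest

    @pytest.mark.dev_mode
    def test_only_in_dev_mode() -> None:
        # Collected only when the default `-m "not dev_mode"` filter is
        # overridden, e.g. `pytest -m dev_mode`.
        assert True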
diff --git a/setup.py b/setup.py
index 54b548c7b44..a73d331ea07 100644
--- a/setup.py
+++ b/setup.py
@@ -50,7 +50,7 @@
raise RuntimeError("Unable to determine version.")
install_requires = [
- "chardet>=2.0,<5.0",
+ "charset-normalizer>=2.0,<3.0",
"multidict>=4.5,<7.0",
"async_timeout>=4.0a2,<5.0",
'asynctest==0.13.0; python_version<"3.8"',
@@ -79,6 +79,7 @@ def read(f):
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
+ "Programming Language :: Python :: 3.10",
"Development Status :: 5 - Production/Stable",
"Operating System :: POSIX",
"Operating System :: MacOS :: MacOS X",
diff --git a/tests/autobahn/.gitignore b/tests/autobahn/.gitignore
deleted file mode 100644
index 08ab34c5253..00000000000
--- a/tests/autobahn/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-/reports
diff --git a/tests/autobahn/Dockerfile.aiohttp b/tests/autobahn/Dockerfile.aiohttp
index 9e20d401596..2d37683a1ad 100644
--- a/tests/autobahn/Dockerfile.aiohttp
+++ b/tests/autobahn/Dockerfile.aiohttp
@@ -5,13 +5,3 @@ COPY ./ /src
WORKDIR /src
RUN pip install .
-# NOTE: The attrs lib is necessary because it's
-# NOTE: still a de-facto runtime dependency of
-# NOTE: aiohttp. Although it shouldn't be anymore
-# NOTE: after the effort to get rid of it.
-# NOTE: It must be removed once made redundant.
-# Refs:
-# * https://github.com/aio-libs/aiohttp/issues/5806
-# * https://github.com/aio-libs/aiohttp/pull/5679/files#r654428147
-# * https://github.com/aio-libs/aiohttp/pull/5284
-RUN pip install attrs
diff --git a/tests/autobahn/Dockerfile.autobahn b/tests/autobahn/Dockerfile.autobahn
new file mode 100644
index 00000000000..45f18182804
--- /dev/null
+++ b/tests/autobahn/Dockerfile.autobahn
@@ -0,0 +1,6 @@
+FROM crossbario/autobahn-testsuite:0.8.2
+
+RUN apt-get update && apt-get install python3 python3-pip -y
+RUN pip3 install wait-for-it
+
+CMD ["wstest", "--mode", "fuzzingserver", "--spec", "/config/fuzzingserver.json"]
diff --git a/tests/autobahn/client/client.py b/tests/autobahn/client/client.py
index afb309aef36..dfca77d12b2 100644
--- a/tests/autobahn/client/client.py
+++ b/tests/autobahn/client/client.py
@@ -5,7 +5,7 @@
import aiohttp
-async def client(url, name):
+async def client(url: str, name: str) -> None:
async with aiohttp.ClientSession() as session:
async with session.ws_connect(url + "/getCaseCount") as ws:
num_tests = int((await ws.receive()).data)
@@ -28,7 +28,7 @@ async def client(url, name):
print("finally requesting %s" % url)
-async def run(url, name):
+async def run(url: str, name: str) -> None:
try:
await client(url, name)
except Exception:
@@ -38,4 +38,4 @@ async def run(url, name):
if __name__ == "__main__":
- asyncio.run(run("http://autobahn:9001", "aiohttp"))
+ asyncio.run(run("http://localhost:9001", "aiohttp"))
diff --git a/tests/autobahn/client/docker-compose.yml b/tests/autobahn/client/docker-compose.yml
deleted file mode 100644
index ac6a8bf3ab7..00000000000
--- a/tests/autobahn/client/docker-compose.yml
+++ /dev/null
@@ -1,17 +0,0 @@
-version: "3.9"
-services:
- autobahn:
- image: crossbario/autobahn-testsuite:0.8.2
- volumes:
- - type: bind
- source: ./fuzzingserver.json
- target: /config/fuzzingserver.json
- - type: bind
- source: ../reports
- target: /reports
-
- aiohttp:
- image: aiohttp-autobahn_aiohttp
- depends_on:
- - autobahn
- command: ["python", "tests/autobahn/client/client.py"]
diff --git a/tests/autobahn/docker-compose.yml b/tests/autobahn/docker-compose.yml
deleted file mode 100644
index ea6b640810d..00000000000
--- a/tests/autobahn/docker-compose.yml
+++ /dev/null
@@ -1,6 +0,0 @@
-version: "3.9"
-services:
- aiohttp:
- build:
- context: ../..
- dockerfile: tests/autobahn/Dockerfile.aiohttp
diff --git a/tests/autobahn/run-tests.sh b/tests/autobahn/run-tests.sh
deleted file mode 100755
index d48894d8cb8..00000000000
--- a/tests/autobahn/run-tests.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-
-rm -rf $PWD/reports
-mkdir $PWD/reports
-
-docker-compose -p aiohttp-autobahn build
-
-docker-compose -f $PWD/client/docker-compose.yml up --abort-on-container-exit
-docker-compose -f $PWD/client/docker-compose.yml down
-
-docker-compose -f $PWD/server/docker-compose.yml up --abort-on-container-exit
-docker-compose -f $PWD/server/docker-compose.yml down
diff --git a/tests/autobahn/server/docker-compose.yml b/tests/autobahn/server/docker-compose.yml
deleted file mode 100644
index 8f12f2d19cc..00000000000
--- a/tests/autobahn/server/docker-compose.yml
+++ /dev/null
@@ -1,18 +0,0 @@
-version: "3.9"
-services:
- autobahn:
- image: crossbario/autobahn-testsuite:0.8.2
- depends_on:
- - aiohttp
- volumes:
- - type: bind
- source: ./fuzzingclient.json
- target: /config/fuzzingclient.json
- - type: bind
- source: ../reports
- target: /reports
- command: ["wstest", "--mode", "fuzzingclient", "--spec", "/config/fuzzingclient.json"]
-
- aiohttp:
- image: aiohttp-autobahn_aiohttp
- command: ["python", "tests/autobahn/server/server.py"]
diff --git a/tests/autobahn/server/fuzzingclient.json b/tests/autobahn/server/fuzzingclient.json
index e9bef9591dc..0ed2f84acf8 100644
--- a/tests/autobahn/server/fuzzingclient.json
+++ b/tests/autobahn/server/fuzzingclient.json
@@ -5,7 +5,7 @@
"servers": [
{
"agent": "AutobahnServer",
- "url": "ws://aiohttp:9001",
+ "url": "ws://localhost:9001",
"options": { "version": 18 }
}
],
diff --git a/tests/autobahn/server/server.py b/tests/autobahn/server/server.py
index 684cdcce6ff..c0e50259b47 100644
--- a/tests/autobahn/server/server.py
+++ b/tests/autobahn/server/server.py
@@ -5,14 +5,16 @@
from aiohttp import WSCloseCode, web
-async def wshandler(request):
+async def wshandler(request: web.Request) -> web.WebSocketResponse:
ws = web.WebSocketResponse(autoclose=False)
is_ws = ws.can_prepare(request)
if not is_ws:
- return web.HTTPBadRequest()
+ raise web.HTTPBadRequest()
await ws.prepare(request)
+ request.app["websockets"].append(ws)
+
while True:
msg = await ws.receive()
@@ -29,7 +31,7 @@ async def wshandler(request):
return ws
-async def on_shutdown(app):
+async def on_shutdown(app: web.Application) -> None:
for ws in set(app["websockets"]):
await ws.close(code=WSCloseCode.GOING_AWAY, message="Server shutdown")
@@ -40,6 +42,7 @@ async def on_shutdown(app):
)
app = web.Application()
+ app["websockets"] = []
app.router.add_route("GET", "/", wshandler)
app.on_shutdown.append(on_shutdown)
try:
diff --git a/tests/autobahn/test_autobahn.py b/tests/autobahn/test_autobahn.py
new file mode 100644
index 00000000000..5d72e37a17a
--- /dev/null
+++ b/tests/autobahn/test_autobahn.py
@@ -0,0 +1,132 @@
+import json
+import subprocess
+import sys
+from pathlib import Path
+from typing import Any, Dict, Generator, List
+
+import pytest
+from pytest import TempPathFactory
+from python_on_whales import DockerException, docker
+
+
+@pytest.fixture(scope="session")
+def report_dir(tmp_path_factory: TempPathFactory) -> Path:
+ return tmp_path_factory.mktemp("reports")
+
+
+@pytest.fixture(scope="session", autouse=True)
+def build_autobahn_testsuite() -> Generator[None, None, None]:
+
+ try:
+ docker.build(
+ file="tests/autobahn/Dockerfile.autobahn",
+ tags=["autobahn-testsuite"],
+ context_path=".",
+ )
+ except DockerException:
+ pytest.skip(msg="The docker daemon is not running.")
+
+ try:
+ yield
+ finally:
+ docker.image.remove(x="autobahn-testsuite")
+
+
+def get_failed_tests(report_path: str, name: str) -> List[Dict[str, Any]]:
+ path = Path(report_path)
+ result_summary = json.loads((path / "index.json").read_text())[name]
+ failed_messages = []
+ PASS = {"OK", "INFORMATIONAL"}
+ entry_fields = {"case", "description", "expectation", "expected", "received"}
+ for results in result_summary.values():
+ if results["behavior"] in PASS and results["behaviorClose"] in PASS:
+ continue
+ report = json.loads((path / results["reportfile"]).read_text())
+ failed_messages.append({field: report[field] for field in entry_fields})
+ return failed_messages
+
+
+@pytest.mark.skipif(sys.platform == "darwin", reason="Don't run on macOS")
+@pytest.mark.xfail
+def test_client(report_dir: Path, request: Any) -> None:
+ try:
+ print("Starting autobahn-testsuite server")
+ autobahn_container = docker.run(
+ detach=True,
+ image="autobahn-testsuite",
+ name="autobahn",
+ publish=[(9001, 9001)],
+ remove=True,
+ volumes=[
+ (f"{request.fspath.dirname}/client", "/config"),
+ (f"{report_dir}", "/reports"),
+ ],
+ )
+ print("Running aiohttp test client")
+ client = subprocess.Popen(
+ ["wait-for-it", "-s", "localhost:9001", "--"]
+ + [sys.executable]
+ + ["tests/autobahn/client/client.py"]
+ )
+ client.wait()
+ finally:
+ print("Stopping client and server")
+ client.terminate()
+ client.wait()
+ autobahn_container.stop()
+
+ failed_messages = get_failed_tests(f"{report_dir}/clients", "aiohttp")
+
+ assert not failed_messages, "\n".join(
+ "\n\t".join(
+ f"{field}: {msg[field]}"
+ for field in ("case", "description", "expectation", "expected", "received")
+ )
+ for msg in failed_messages
+ )
+
+
+@pytest.mark.skipif(sys.platform == "darwin", reason="Don't run on macOS")
+@pytest.mark.xfail
+def test_server(report_dir: Path, request: Any) -> None:
+ try:
+ print("Starting aiohttp test server")
+ server = subprocess.Popen(
+ [sys.executable] + ["tests/autobahn/server/server.py"]
+ )
+ print("Starting autobahn-testsuite client")
+ docker.run(
+ image="autobahn-testsuite",
+ name="autobahn",
+ remove=True,
+ volumes=[
+ (f"{request.fspath.dirname}/server", "/config"),
+ (f"{report_dir}", "/reports"),
+ ],
+ networks=["host"],
+ command=[
+ "wait-for-it",
+ "-s",
+ "localhost:9001",
+ "--",
+ "wstest",
+ "--mode",
+ "fuzzingclient",
+ "--spec",
+ "/config/fuzzingclient.json",
+ ],
+ )
+ finally:
+ print("Stopping client and server")
+ server.terminate()
+ server.wait()
+
+ failed_messages = get_failed_tests(f"{report_dir}/servers", "AutobahnServer")
+
+ assert not failed_messages, "\n".join(
+ "\n\t".join(
+ f"{field}: {msg[field]}"
+ for field in ("case", "description", "expectation", "expected", "received")
+ )
+ for msg in failed_messages
+ )
diff --git a/tests/conftest.py b/tests/conftest.py
index eda5c60b727..0922d3b21f3 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -55,7 +55,7 @@ def tls_certificate(tls_certificate_authority: Any) -> Any:
@pytest.fixture
def ssl_ctx(tls_certificate: Any) -> ssl.SSLContext:
- ssl_ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
+ ssl_ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
tls_certificate.configure_cert(ssl_ctx)
return ssl_ctx
diff --git a/tests/test_client_functional.py b/tests/test_client_functional.py
index 52d74d98324..058d1594ec3 100644
--- a/tests/test_client_functional.py
+++ b/tests/test_client_functional.py
@@ -8,11 +8,13 @@
import json
import pathlib
import socket
+import ssl
from typing import Any
from unittest import mock
import pytest
from multidict import MultiDict
+from yarl import URL
import aiohttp
from aiohttp import Fingerprint, ServerFingerprintMismatch, hdrs, web
@@ -994,7 +996,7 @@ async def handler(request):
async def redirect(request):
count = int(request.match_info["count"])
if count:
- raise web.HTTPFound(location="/redirect/{}".format(count - 1))
+ raise web.HTTPFound(location=f"/redirect/{count - 1}")
else:
raise web.HTTPFound(location="/")
@@ -2333,25 +2335,85 @@ async def test_creds_in_auth_and_url() -> None:
await session.close()
-async def test_drop_auth_on_redirect_to_other_host(aiohttp_server: Any) -> None:
- async def srv1(request):
- assert request.host == "host1.com"
+@pytest.fixture
+def create_server_for_url_and_handler(
+ aiohttp_server: Any, tls_certificate_authority: Any
+):
+ def create(url: URL, srv: Any):
+ app = web.Application()
+ app.router.add_route("GET", url.path, srv)
+
+ kwargs = {}
+ if url.scheme == "https":
+ cert = tls_certificate_authority.issue_cert(
+ url.host, "localhost", "127.0.0.1"
+ )
+ ssl_ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
+ cert.configure_cert(ssl_ctx)
+ kwargs["ssl"] = ssl_ctx
+ return aiohttp_server(app, **kwargs)
+
+ return create
+
+
+@pytest.mark.parametrize(
+ ["url_from", "url_to", "is_drop_header_expected"],
+ [
+ [
+ "http://host1.com/path1",
+ "http://host2.com/path2",
+ True,
+ ],
+ ["http://host1.com/path1", "https://host1.com/path1", False],
+ ["https://host1.com/path1", "http://host1.com/path2", True],
+ ],
+ ids=(
+ "entirely different hosts",
+ "http -> https",
+ "https -> http",
+ ),
+)
+async def test_drop_auth_on_redirect_to_other_host(
+ create_server_for_url_and_handler: Any,
+ url_from: str,
+ url_to: str,
+ is_drop_header_expected: bool,
+) -> None:
+ url_from, url_to = URL(url_from), URL(url_to)
+
+ async def srv_from(request):
+ assert request.host == url_from.host
assert request.headers["Authorization"] == "Basic dXNlcjpwYXNz"
- raise web.HTTPFound("http://host2.com/path2")
+ raise web.HTTPFound(url_to)
- async def srv2(request):
- assert request.host == "host2.com"
- assert "Authorization" not in request.headers
+ async def srv_to(request):
+ assert request.host == url_to.host
+ if is_drop_header_expected:
+ assert "Authorization" not in request.headers, "Header wasn't dropped"
+ else:
+ assert "Authorization" in request.headers, "Header was dropped"
return web.Response()
- app = web.Application()
- app.router.add_route("GET", "/path1", srv1)
- app.router.add_route("GET", "/path2", srv2)
+ server_from = await create_server_for_url_and_handler(url_from, srv_from)
+ server_to = await create_server_for_url_and_handler(url_to, srv_to)
- server = await aiohttp_server(app)
+ assert (
+ url_from.host != url_to.host or server_from.scheme != server_to.scheme
+ ), "Invalid test case, host or scheme must differ"
+
+ protocol_port_map = {
+ "http": 80,
+ "https": 443,
+ }
+ etc_hosts = {
+ (url_from.host, protocol_port_map[server_from.scheme]): server_from,
+ (url_to.host, protocol_port_map[server_to.scheme]): server_to,
+ }
class FakeResolver(AbstractResolver):
async def resolve(self, host, port=0, family=socket.AF_INET):
+ server = etc_hosts[(host, port)]
+
return [
{
"hostname": host,
@@ -2366,14 +2428,17 @@ async def resolve(self, host, port=0, family=socket.AF_INET):
async def close(self):
pass
- connector = aiohttp.TCPConnector(resolver=FakeResolver())
+ connector = aiohttp.TCPConnector(resolver=FakeResolver(), ssl=False)
+
async with aiohttp.ClientSession(connector=connector) as client:
resp = await client.get(
- "http://host1.com/path1", auth=aiohttp.BasicAuth("user", "pass")
+ url_from,
+ auth=aiohttp.BasicAuth("user", "pass"),
)
assert resp.status == 200
resp = await client.get(
- "http://host1.com/path1", headers={"Authorization": "Basic dXNlcjpwYXNz"}
+ url_from,
+ headers={"Authorization": "Basic dXNlcjpwYXNz"},
)
assert resp.status == 200
diff --git a/tests/test_client_request.py b/tests/test_client_request.py
index cfe2a45edc7..6ab8761778a 100644
--- a/tests/test_client_request.py
+++ b/tests/test_client_request.py
@@ -119,11 +119,6 @@ def test_version_err(make_request: Any) -> None:
make_request("get", "http://python.org/", version="1.c")
-def test_https_proxy(make_request: Any) -> None:
- with pytest.raises(ValueError):
- make_request("get", "http://python.org/", proxy=URL("https://proxy.org"))
-
-
def test_keep_alive(make_request: Any) -> None:
req = make_request("get", "http://python.org/", version=(0, 9))
assert not req.keep_alive()
diff --git a/tests/test_connector.py b/tests/test_connector.py
index e57d7ecebef..fa8d6f6ff88 100644
--- a/tests/test_connector.py
+++ b/tests/test_connector.py
@@ -81,7 +81,11 @@ async def go(app):
def create_mocked_conn(conn_closing_result: Optional[Any] = None, **kwargs: Any):
assert "loop" not in kwargs
- loop = asyncio.get_event_loop()
+ try:
+ loop = asyncio.get_running_loop()
+ except RuntimeError:
+ loop = asyncio.get_event_loop_policy().get_event_loop()
+
proto = mock.Mock(**kwargs)
proto.closed = loop.create_future()
proto.closed.set_result(conn_closing_result)
@@ -188,7 +192,7 @@ async def test_del_with_scheduled_cleanup(loop: Any) -> None:
# obviously doesn't deletion because loop has a strong
# reference to connector's instance method, isn't it?
del conn
- await asyncio.sleep(0.01, loop=loop)
+ await asyncio.sleep(0.01)
gc.collect()
assert not conns_impl
@@ -1267,7 +1271,7 @@ async def test___get_ssl_context1(loop: Any) -> None:
async def test___get_ssl_context2(loop: Any) -> None:
- ctx = ssl.SSLContext()
+ ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
conn = aiohttp.TCPConnector()
req = mock.Mock()
req.is_ssl.return_value = True
@@ -1276,7 +1280,7 @@ async def test___get_ssl_context2(loop: Any) -> None:
async def test___get_ssl_context3(loop: Any) -> None:
- ctx = ssl.SSLContext()
+ ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
conn = aiohttp.TCPConnector(ssl=ctx)
req = mock.Mock()
req.is_ssl.return_value = True
@@ -1285,7 +1289,7 @@ async def test___get_ssl_context3(loop: Any) -> None:
async def test___get_ssl_context4(loop: Any) -> None:
- ctx = ssl.SSLContext()
+ ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
conn = aiohttp.TCPConnector(ssl=ctx)
req = mock.Mock()
req.is_ssl.return_value = True
@@ -1294,7 +1298,7 @@ async def test___get_ssl_context4(loop: Any) -> None:
async def test___get_ssl_context5(loop: Any) -> None:
- ctx = ssl.SSLContext()
+ ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
conn = aiohttp.TCPConnector(ssl=ctx)
req = mock.Mock()
req.is_ssl.return_value = True
diff --git a/tests/test_cookiejar.py b/tests/test_cookiejar.py
index 5a740e0dfb8..a0212dcd049 100644
--- a/tests/test_cookiejar.py
+++ b/tests/test_cookiejar.py
@@ -26,7 +26,7 @@ def cookies_to_send():
"different-domain-cookie=sixth; Domain=different.org; "
"secure-cookie=seventh; Domain=secure.com; Secure; "
"no-path-cookie=eighth; Domain=pathtest.com; "
- "path1-cookie=nineth; Domain=pathtest.com; Path=/; "
+ "path1-cookie=ninth; Domain=pathtest.com; Path=/; "
"path2-cookie=tenth; Domain=pathtest.com; Path=/one; "
"path3-cookie=eleventh; Domain=pathtest.com; Path=/one/two; "
"path4-cookie=twelfth; Domain=pathtest.com; Path=/one/two/; "
@@ -52,7 +52,7 @@ def cookies_to_send_with_expired():
"different-domain-cookie=sixth; Domain=different.org; "
"secure-cookie=seventh; Domain=secure.com; Secure; "
"no-path-cookie=eighth; Domain=pathtest.com; "
- "path1-cookie=nineth; Domain=pathtest.com; Path=/; "
+ "path1-cookie=ninth; Domain=pathtest.com; Path=/; "
"path2-cookie=tenth; Domain=pathtest.com; Path=/one; "
"path3-cookie=eleventh; Domain=pathtest.com; Path=/one/two; "
"path4-cookie=twelfth; Domain=pathtest.com; Path=/one/two/; "
@@ -78,7 +78,7 @@ def cookies_to_receive():
"different-domain-cookie=sixth; Domain=different.org; Path=/; "
"no-path-cookie=seventh; Domain=pathtest.com; "
"path-cookie=eighth; Domain=pathtest.com; Path=/somepath; "
- "wrong-path-cookie=nineth; Domain=pathtest.com; Path=somepath;"
+ "wrong-path-cookie=ninth; Domain=pathtest.com; Path=somepath;"
)
@@ -254,7 +254,7 @@ async def test_domain_filter_ip_cookie_send(loop: Any) -> None:
"different-domain-cookie=sixth; Domain=different.org; "
"secure-cookie=seventh; Domain=secure.com; Secure; "
"no-path-cookie=eighth; Domain=pathtest.com; "
- "path1-cookie=nineth; Domain=pathtest.com; Path=/; "
+ "path1-cookie=ninth; Domain=pathtest.com; Path=/; "
"path2-cookie=tenth; Domain=pathtest.com; Path=/one; "
"path3-cookie=eleventh; Domain=pathtest.com; Path=/one/two; "
"path4-cookie=twelfth; Domain=pathtest.com; Path=/one/two/; "
@@ -371,7 +371,7 @@ def setUp(self):
"different-domain-cookie=sixth; Domain=different.org; "
"secure-cookie=seventh; Domain=secure.com; Secure; "
"no-path-cookie=eighth; Domain=pathtest.com; "
- "path1-cookie=nineth; Domain=pathtest.com; Path=/; "
+ "path1-cookie=ninth; Domain=pathtest.com; Path=/; "
"path2-cookie=tenth; Domain=pathtest.com; Path=/one; "
"path3-cookie=eleventh; Domain=pathtest.com; Path=/one/two; "
"path4-cookie=twelfth; Domain=pathtest.com; Path=/one/two/; "
@@ -394,7 +394,7 @@ def setUp(self):
"different-domain-cookie=sixth; Domain=different.org; Path=/; "
"no-path-cookie=seventh; Domain=pathtest.com; "
"path-cookie=eighth; Domain=pathtest.com; Path=/somepath; "
- "wrong-path-cookie=nineth; Domain=pathtest.com; Path=somepath;"
+ "wrong-path-cookie=ninth; Domain=pathtest.com; Path=somepath;"
)
async def make_jar():
diff --git a/tests/test_formdata.py b/tests/test_formdata.py
index 3ed90cf3dc9..959e79d98be 100644
--- a/tests/test_formdata.py
+++ b/tests/test_formdata.py
@@ -100,8 +100,10 @@ async def handler(request):
data = FormData()
data.add_field("test", "test_value", content_type="application/json")
- await client.post("/", data=data)
+ resp = await client.post("/", data=data)
assert len(data._writer._parts) == 1
with pytest.raises(RuntimeError):
await client.post("/", data=data)
+
+ resp.release()
\ No newline at end of file
diff --git a/tests/test_helpers.py b/tests/test_helpers.py
index e9b99e12170..8f029f15666 100644
--- a/tests/test_helpers.py
+++ b/tests/test_helpers.py
@@ -172,7 +172,7 @@ def test_basic_auth_decode_invalid_credentials() -> None:
),
)
def test_basic_auth_decode_blank_username(credentials, expected_auth) -> None:
- header = "Basic {}".format(base64.b64encode(credentials.encode()).decode())
+ header = f"Basic {base64.b64encode(credentials.encode()).decode()}"
assert helpers.BasicAuth.decode(header) == expected_auth
diff --git a/tests/test_http_parser.py b/tests/test_http_parser.py
index 172d7bc30cf..80913ae4360 100644
--- a/tests/test_http_parser.py
+++ b/tests/test_http_parser.py
@@ -978,7 +978,7 @@ async def test_http_payload_parser_deflate(self, stream: Any) -> None:
assert out.is_eof()
async def test_http_payload_parser_deflate_no_hdrs(self, stream: Any) -> None:
- """Tests incorrectly formed data (no zlib headers) """
+ """Tests incorrectly formed data (no zlib headers)"""
# c=compressobj(wbits=-15); b''.join([c.compress(b'data'), c.flush()])
COMPRESSED = b"KI,I\x04\x00"
diff --git a/tests/test_http_writer.py b/tests/test_http_writer.py
index ab62ffc31b5..3fb5531ca1d 100644
--- a/tests/test_http_writer.py
+++ b/tests/test_http_writer.py
@@ -5,6 +5,7 @@
from unittest import mock
import pytest
+from multidict import CIMultiDict
from aiohttp import http
from aiohttp.test_utils import make_mocked_coro
@@ -272,3 +273,16 @@ async def test_drain_no_transport(protocol: Any, transport: Any, loop: Any) -> N
msg._protocol.transport = None
await msg.drain()
assert not protocol._drain_helper.called
+
+
+async def test_write_headers_prevents_injection(
+ protocol: Any, transport: Any, loop: Any
+) -> None:
+ msg = http.StreamWriter(protocol, loop)
+ status_line = "HTTP/1.1 200 OK"
+ wrong_headers = CIMultiDict({"Set-Cookie: abc=123\r\nContent-Length": "256"})
+ with pytest.raises(ValueError):
+ await msg.write_headers(status_line, wrong_headers)
+ wrong_headers = CIMultiDict({"Content-Length": "256\r\nSet-Cookie: abc=123"})
+ with pytest.raises(ValueError):
+ await msg.write_headers(status_line, wrong_headers)
diff --git a/tests/test_locks.py b/tests/test_locks.py
index d2d3e8f141e..60a816ed647 100644
--- a/tests/test_locks.py
+++ b/tests/test_locks.py
@@ -19,7 +19,7 @@ async def c() -> Union[int, Exception]:
return 1
t = loop.create_task(c())
- await asyncio.sleep(0, loop=loop)
+ await asyncio.sleep(0)
e = Exception()
ev.set(exc=e)
assert (await t) == e
@@ -32,7 +32,7 @@ async def c() -> int:
return 1
t = loop.create_task(c())
- await asyncio.sleep(0, loop=loop)
+ await asyncio.sleep(0)
ev.set()
assert (await t) == 1
@@ -44,7 +44,7 @@ async def c() -> None:
t1 = loop.create_task(c())
t2 = loop.create_task(c())
- await asyncio.sleep(0, loop=loop)
+ await asyncio.sleep(0)
ev.cancel()
ev.set()
diff --git a/tests/test_loop.py b/tests/test_loop.py
index 50fe5a8ad69..f5a4c7774e1 100644
--- a/tests/test_loop.py
+++ b/tests/test_loop.py
@@ -6,14 +6,15 @@
import pytest
from aiohttp import web
-from aiohttp.test_utils import AioHTTPTestCase
+from aiohttp.helpers import PY_38
+from aiohttp.test_utils import AioHTTPTestCase, loop_context
@pytest.mark.skipif(
platform.system() == "Windows", reason="the test is not valid for Windows"
)
async def test_subprocess_co(loop: Any) -> None:
- assert threading.current_thread() is threading.main_thread()
+ assert PY_38 or threading.current_thread() is threading.main_thread()
proc = await asyncio.create_subprocess_shell(
"exit 0",
stdin=asyncio.subprocess.DEVNULL,
@@ -38,8 +39,29 @@ async def test_on_startup_hook(self) -> None:
self.assertTrue(self.on_startup_called)
def test_default_loop(self) -> None:
- self.assertIs(self.loop, asyncio.get_event_loop())
+ self.assertIs(self.loop, asyncio.get_event_loop_policy().get_event_loop())
def test_default_loop(loop: Any) -> None:
- assert asyncio.get_event_loop() is loop
+ assert asyncio.get_event_loop_policy().get_event_loop() is loop
+
+
+@pytest.mark.xfail(not PY_38, reason="ThreadedChildWatcher is only available in 3.8+")
+def test_setup_loop_non_main_thread() -> None:
+ child_exc = None
+
+ def target() -> None:
+ try:
+ with loop_context() as loop:
+ assert asyncio.get_event_loop_policy().get_event_loop() is loop
+ loop.run_until_complete(test_subprocess_co(loop))
+ except Exception as exc:
+ nonlocal child_exc
+ child_exc = exc
+
+ # Ensures setup_test_loop can be called by pytest-xdist in non-main thread.
+ t = threading.Thread(target=target)
+ t.start()
+ t.join()
+
+ assert child_exc is None
diff --git a/tests/test_proxy.py b/tests/test_proxy.py
index c778f85f531..af869ee88f7 100644
--- a/tests/test_proxy.py
+++ b/tests/test_proxy.py
@@ -228,6 +228,7 @@ async def make_conn():
tr, proto = mock.Mock(), mock.Mock()
self.loop.create_connection = make_mocked_coro((tr, proto))
+ self.loop.start_tls = make_mocked_coro(mock.Mock())
req = ClientRequest(
"GET",
@@ -242,8 +243,6 @@ async def make_conn():
self.assertEqual(req.url.path, "/")
self.assertEqual(proxy_req.method, "CONNECT")
self.assertEqual(proxy_req.url, URL("https://www.python.org"))
- tr.close.assert_called_once_with()
- tr.get_extra_info.assert_called_with("socket", default=None)
self.loop.run_until_complete(proxy_req.close())
proxy_resp.close()
@@ -287,22 +286,10 @@ async def make_conn():
]
)
- seq = 0
-
- async def create_connection(*args, **kwargs):
- nonlocal seq
- seq += 1
-
- # connection to http://proxy.example.com
- if seq == 1:
- return mock.Mock(), mock.Mock()
- # connection to https://www.python.org
- elif seq == 2:
- raise ssl.CertificateError
- else:
- assert False
-
- self.loop.create_connection = create_connection
+ # Called on connection to http://proxy.example.com
+ self.loop.create_connection = make_mocked_coro((mock.Mock(), mock.Mock()))
+ # Called on connection to https://www.python.org
+ self.loop.start_tls = make_mocked_coro(raise_exception=ssl.CertificateError)
req = ClientRequest(
"GET",
@@ -353,75 +340,12 @@ async def make_conn():
]
)
- seq = 0
-
- async def create_connection(*args, **kwargs):
- nonlocal seq
- seq += 1
-
- # connection to http://proxy.example.com
- if seq == 1:
- return mock.Mock(), mock.Mock()
- # connection to https://www.python.org
- elif seq == 2:
- raise ssl.SSLError
- else:
- assert False
-
- self.loop.create_connection = create_connection
-
- req = ClientRequest(
- "GET",
- URL("https://www.python.org"),
- proxy=URL("http://proxy.example.com"),
- loop=self.loop,
+ # Called on connection to http://proxy.example.com
+ self.loop.create_connection = make_mocked_coro(
+ (mock.Mock(), mock.Mock()),
)
- with self.assertRaises(aiohttp.ClientConnectorSSLError):
- self.loop.run_until_complete(
- connector._create_connection(req, None, aiohttp.ClientTimeout())
- )
-
- @mock.patch("aiohttp.connector.ClientRequest")
- def test_https_connect_runtime_error(self, ClientRequestMock: Any) -> None:
- proxy_req = ClientRequest(
- "GET", URL("http://proxy.example.com"), loop=self.loop
- )
- ClientRequestMock.return_value = proxy_req
-
- proxy_resp = ClientResponse(
- "get",
- URL("http://proxy.example.com"),
- request_info=mock.Mock(),
- writer=mock.Mock(),
- continue100=None,
- timer=TimerNoop(),
- traces=[],
- loop=self.loop,
- session=mock.Mock(),
- )
- proxy_req.send = make_mocked_coro(proxy_resp)
- proxy_resp.start = make_mocked_coro(mock.Mock(status=200))
-
- async def make_conn():
- return aiohttp.TCPConnector()
-
- connector = self.loop.run_until_complete(make_conn())
- connector._resolve_host = make_mocked_coro(
- [
- {
- "hostname": "hostname",
- "host": "127.0.0.1",
- "port": 80,
- "family": socket.AF_INET,
- "proto": 0,
- "flags": 0,
- }
- ]
- )
-
- tr, proto = mock.Mock(), mock.Mock()
- tr.get_extra_info.return_value = None
- self.loop.create_connection = make_mocked_coro((tr, proto))
+ # Called on connection to https://www.python.org
+ self.loop.start_tls = make_mocked_coro(raise_exception=ssl.SSLError)
req = ClientRequest(
"GET",
@@ -429,17 +353,11 @@ async def make_conn():
proxy=URL("http://proxy.example.com"),
loop=self.loop,
)
- with self.assertRaisesRegex(
- RuntimeError, "Transport does not expose socket instance"
- ):
+ with self.assertRaises(aiohttp.ClientConnectorSSLError):
self.loop.run_until_complete(
connector._create_connection(req, None, aiohttp.ClientTimeout())
)
- self.loop.run_until_complete(proxy_req.close())
- proxy_resp.close()
- self.loop.run_until_complete(req.close())
-
@mock.patch("aiohttp.connector.ClientRequest")
def test_https_connect_http_proxy_error(self, ClientRequestMock: Any) -> None:
proxy_req = ClientRequest(
@@ -650,6 +568,7 @@ async def make_conn():
tr, proto = mock.Mock(), mock.Mock()
self.loop.create_connection = make_mocked_coro((tr, proto))
+ self.loop.start_tls = make_mocked_coro(mock.Mock())
req = ClientRequest(
"GET",
@@ -661,18 +580,17 @@ async def make_conn():
connector._create_connection(req, None, aiohttp.ClientTimeout())
)
- self.loop.create_connection.assert_called_with(
+ self.loop.start_tls.assert_called_with(
+ mock.ANY,
mock.ANY,
- ssl=connector._make_ssl_context(True),
- sock=mock.ANY,
+ connector._make_ssl_context(True),
server_hostname="www.python.org",
+ ssl_handshake_timeout=mock.ANY,
)
self.assertEqual(req.url.path, "/")
self.assertEqual(proxy_req.method, "CONNECT")
self.assertEqual(proxy_req.url, URL("https://www.python.org"))
- tr.close.assert_called_once_with()
- tr.get_extra_info.assert_called_with("socket", default=None)
self.loop.run_until_complete(proxy_req.close())
proxy_resp.close()
@@ -721,6 +639,7 @@ async def make_conn():
tr, proto = mock.Mock(), mock.Mock()
self.loop.create_connection = make_mocked_coro((tr, proto))
+ self.loop.start_tls = make_mocked_coro(mock.Mock())
self.assertIn("AUTHORIZATION", proxy_req.headers)
self.assertNotIn("PROXY-AUTHORIZATION", proxy_req.headers)
diff --git a/tests/test_proxy_functional.py b/tests/test_proxy_functional.py
index e1c3c0095e7..ab3f5986ac9 100644
--- a/tests/test_proxy_functional.py
+++ b/tests/test_proxy_functional.py
@@ -1,15 +1,217 @@
# type: ignore
import asyncio
+import functools
import os
import pathlib
+import platform
+from re import match as match_regex
from typing import Any
from unittest import mock
+from uuid import uuid4
+import proxy
import pytest
from yarl import URL
import aiohttp
from aiohttp import web
+from aiohttp.client_exceptions import ClientConnectionError, ClientProxyConnectionError
+from aiohttp.helpers import PY_310
+
+secure_proxy_xfail_under_py310_except_macos = functools.partial(
+ pytest.mark.xfail,
+ PY_310 and platform.system() != "Darwin",
+ reason=(
+ "The secure proxy fixture does not seem to work "
+ "under Python 3.10 on Linux or Windows. "
+ "See https://github.com/abhinavsingh/proxy.py/issues/622."
+ ),
+)
+
+ASYNCIO_SUPPORTS_TLS_IN_TLS = hasattr(
+ asyncio.sslproto._SSLProtocolTransport,
+ "_start_tls_compatible",
+)
+
+
+@pytest.fixture
+def secure_proxy_url(monkeypatch, tls_certificate_pem_path):
+ """Return the URL of an instance of a running secure proxy.
+
+ This fixture also spawns that instance and tears it down after the test.
+ """
+ proxypy_args = [
+ "--threadless", # use asyncio
+ "--num-workers",
+ "1", # the tests only send one query anyway
+ "--hostname",
+ "127.0.0.1", # network interface to listen to
+ "--port",
+ 0, # ephemeral port, so that kernel allocates a free one
+ "--cert-file",
+ tls_certificate_pem_path, # contains both key and cert
+ "--key-file",
+ tls_certificate_pem_path, # contains both key and cert
+ ]
+
+    class PatchedAcceptorPool(proxy.core.acceptor.AcceptorPool):
+ def listen(self):
+ super().listen()
+ self.socket_host, self.socket_port = self.socket.getsockname()[:2]
+
+    monkeypatch.setattr(proxy.proxy, "AcceptorPool", PatchedAcceptorPool)
+
+ with proxy.Proxy(input_args=proxypy_args) as proxy_instance:
+ yield URL.build(
+ scheme="https",
+ host=proxy_instance.acceptors.socket_host,
+ port=proxy_instance.acceptors.socket_port,
+ )
+
+
+@pytest.fixture
+def web_server_endpoint_payload():
+ return str(uuid4())
+
+
+@pytest.fixture(params=("http", "https"))
+def web_server_endpoint_type(request):
+ return request.param
+
+
+@pytest.fixture
+async def web_server_endpoint_url(
+ aiohttp_server,
+ ssl_ctx,
+ web_server_endpoint_payload,
+ web_server_endpoint_type,
+):
+ server_kwargs = (
+ {
+ "ssl": ssl_ctx,
+ }
+ if web_server_endpoint_type == "https"
+ else {}
+ )
+
+ async def handler(*args, **kwargs):
+ return web.Response(text=web_server_endpoint_payload)
+
+ app = web.Application()
+ app.router.add_route("GET", "/", handler)
+ server = await aiohttp_server(app, **server_kwargs)
+
+ return URL.build(
+ scheme=web_server_endpoint_type,
+ host=server.host,
+ port=server.port,
+ )
+
+
+@pytest.fixture
+def _pretend_asyncio_supports_tls_in_tls(
+ monkeypatch,
+ web_server_endpoint_type,
+):
+ if web_server_endpoint_type != "https" or ASYNCIO_SUPPORTS_TLS_IN_TLS:
+ return
+
+ # for https://github.com/python/cpython/pull/28073
+ # and https://bugs.python.org/issue37179
+ monkeypatch.setattr(
+ asyncio.sslproto._SSLProtocolTransport,
+ "_start_tls_compatible",
+ True,
+ raising=False,
+ )
+
+
+@secure_proxy_xfail_under_py310_except_macos(raises=ClientProxyConnectionError)
+@pytest.mark.parametrize("web_server_endpoint_type", ("http", "https"))
+@pytest.mark.usefixtures("_pretend_asyncio_supports_tls_in_tls", "loop")
+async def test_secure_https_proxy_absolute_path(
+ client_ssl_ctx,
+ secure_proxy_url,
+ web_server_endpoint_url,
+ web_server_endpoint_payload,
+) -> None:
+ """Ensure HTTP(S) sites are accessible through a secure proxy."""
+ conn = aiohttp.TCPConnector()
+ sess = aiohttp.ClientSession(connector=conn)
+
+ response = await sess.get(
+ web_server_endpoint_url,
+ proxy=secure_proxy_url,
+ ssl=client_ssl_ctx, # used for both proxy and endpoint connections
+ )
+
+ assert response.status == 200
+ assert await response.text() == web_server_endpoint_payload
+
+ response.close()
+ await sess.close()
+ await conn.close()
+
+
+@secure_proxy_xfail_under_py310_except_macos(raises=AssertionError)
+@pytest.mark.parametrize("web_server_endpoint_type", ("https",))
+@pytest.mark.usefixtures("loop")
+async def test_https_proxy_unsupported_tls_in_tls(
+ client_ssl_ctx,
+ secure_proxy_url,
+ web_server_endpoint_type,
+) -> None:
+ """Ensure connecting to TLS endpoints w/ HTTPS proxy needs patching.
+
+ This also checks that a helpful warning on how to patch the env
+ is displayed.
+ """
+ url = URL.build(scheme=web_server_endpoint_type, host="python.org")
+
+ escaped_host_port = ":".join((url.host.replace(".", r"\."), str(url.port)))
+ escaped_proxy_url = str(secure_proxy_url).replace(".", r"\.")
+
+ conn = aiohttp.TCPConnector()
+ sess = aiohttp.ClientSession(connector=conn)
+
+ expected_warning_text = (
+ r"^"
+ r"An HTTPS request is being sent through an HTTPS proxy\. "
+ "This support for TLS in TLS is known to be disabled "
+ r"in the stdlib asyncio\. This is why you'll probably see "
+ r"an error in the log below\.\n\n"
+ "It is possible to enable it via monkeypatching under "
+ r"Python 3\.7 or higher\. For more details, see:\n"
+ r"\* https://bugs\.python\.org/issue37179\n"
+ r"\* https://github\.com/python/cpython/pull/28073\n\n"
+ r"You can temporarily patch this as follows:\n"
+ r"\* https://docs\.aiohttp\.org/en/stable/client_advanced\.html#proxy-support\n"
+ r"\* https://github\.com/aio-libs/aiohttp/discussions/6044\n$"
+ )
+    type_err = (
+        r"transport <asyncio\.sslproto\._SSLProtocolTransport object at "
+        r"0x[\d\w]+> is not supported by start_tls\(\)"
+    )
+ expected_exception_reason = (
+ r"^"
+ "Cannot initialize a TLS-in-TLS connection to host "
+ f"{escaped_host_port!s} through an underlying connection "
+ f"to an HTTPS proxy {escaped_proxy_url!s} ssl:{client_ssl_ctx!s} "
+ f"[{type_err!s}]"
+ r"$"
+ )
+
+ with pytest.warns(RuntimeWarning, match=expected_warning_text,), pytest.raises(
+ ClientConnectionError,
+ match=expected_exception_reason,
+ ) as conn_err:
+ await sess.get(url, proxy=secure_proxy_url, ssl=client_ssl_ctx)
+
+ assert type(conn_err.value.__cause__) == TypeError
+ assert match_regex(f"^{type_err!s}$", str(conn_err.value.__cause__))
+
+ await sess.close()
+ await conn.close()
@pytest.fixture
diff --git a/tests/test_resolver.py b/tests/test_resolver.py
index b74764525c2..3b984cb46c4 100644
--- a/tests/test_resolver.py
+++ b/tests/test_resolver.py
@@ -44,7 +44,7 @@ async def fake(*args: Any, **kwargs: Any) -> List[Any]:
if not hosts:
raise socket.gaierror
- return list([(None, None, None, None, [h, 0]) for h in hosts])
+ return [(None, None, None, None, [h, 0]) for h in hosts]
return fake
diff --git a/tests/test_web_app.py b/tests/test_web_app.py
index 91b4f69274c..12c40293793 100644
--- a/tests/test_web_app.py
+++ b/tests/test_web_app.py
@@ -7,6 +7,7 @@
from aiohttp import log, web
from aiohttp.test_utils import make_mocked_coro
+from aiohttp.typedefs import Handler
async def test_app_ctor() -> None:
@@ -137,7 +138,7 @@ def test_app_run_middlewares() -> None:
root.freeze()
assert root._run_middlewares is False
- async def middleware(request, handler):
+ async def middleware(request, handler: Handler):
return await handler(request)
root = web.Application(middlewares=[middleware])
diff --git a/tests/test_web_functional.py b/tests/test_web_functional.py
index d4be1724827..c54db16bdc2 100644
--- a/tests/test_web_functional.py
+++ b/tests/test_web_functional.py
@@ -17,6 +17,7 @@
from aiohttp import FormData, HttpVersion10, HttpVersion11, TraceConfig, multipart, web
from aiohttp.hdrs import CONTENT_LENGTH, CONTENT_TYPE, TRANSFER_ENCODING
from aiohttp.test_utils import make_mocked_coro
+from aiohttp.typedefs import Handler
try:
import ssl
@@ -444,7 +445,7 @@ async def handler(request):
def test_repr_for_application() -> None:
app = web.Application()
- assert "".format(id(app)) == repr(app)
+ assert f"" == repr(app)
async def test_expect_default_handler_unknown(aiohttp_client: Any) -> None:
@@ -1213,7 +1214,7 @@ async def handler(request):
with pytest.warns(DeprecationWarning, match="Middleware decorator is deprecated"):
@web.middleware
- async def middleware(request, handler):
+ async def middleware(request, handler: Handler):
order.append((1, request.app["name"]))
resp = await handler(request)
assert 200 == resp.status
@@ -1353,7 +1354,7 @@ async def test_subapp_middleware_context(
values = []
def show_app_context(appname):
- async def middleware(request, handler):
+ async def middleware(request, handler: Handler):
values.append("{}: {}".format(appname, request.app["my_value"]))
return await handler(request)
diff --git a/tests/test_web_log.py b/tests/test_web_log.py
index b834f87310b..fa5fb27f744 100644
--- a/tests/test_web_log.py
+++ b/tests/test_web_log.py
@@ -10,6 +10,7 @@
import aiohttp
from aiohttp import web
from aiohttp.abc import AbstractAccessLogger, AbstractAsyncAccessLogger
+from aiohttp.typedefs import Handler
from aiohttp.web_log import AccessLogger
from aiohttp.web_response import Response
@@ -232,7 +233,7 @@ async def test_contextvars_logger(aiohttp_server: Any, aiohttp_client: Any):
async def handler(request):
return web.Response()
- async def middleware(request, handler):
+ async def middleware(request, handler: Handler):
VAR.set("uuid")
return await handler(request)
diff --git a/tests/test_web_middleware.py b/tests/test_web_middleware.py
index 33343b65dee..cfd85b5f95e 100644
--- a/tests/test_web_middleware.py
+++ b/tests/test_web_middleware.py
@@ -5,13 +5,14 @@
from yarl import URL
from aiohttp import web
+from aiohttp.typedefs import Handler
async def test_middleware_modifies_response(loop: Any, aiohttp_client: Any) -> None:
async def handler(request):
return web.Response(body=b"OK")
- async def middleware(request, handler):
+ async def middleware(request, handler: Handler):
resp = await handler(request)
assert 200 == resp.status
resp.set_status(201)
@@ -32,7 +33,7 @@ async def test_middleware_handles_exception(loop: Any, aiohttp_client: Any) -> N
async def handler(request):
raise RuntimeError("Error text")
- async def middleware(request, handler):
+ async def middleware(request, handler: Handler):
with pytest.raises(RuntimeError) as ctx:
await handler(request)
return web.Response(status=501, text=str(ctx.value) + "[MIDDLEWARE]")
@@ -59,7 +60,7 @@ async def handler2(request):
middleware_annotation_seen_values = []
def make_middleware(num):
- async def middleware(request, handler):
+ async def middleware(request, handler: Handler):
middleware_annotation_seen_values.append(
getattr(handler, "annotation", None)
)
@@ -104,7 +105,7 @@ async def handler(request):
middleware_annotation_seen_values = []
def make_middleware(num):
- async def middleware(request, handler):
+ async def middleware(request, handler: Handler):
annotation = getattr(handler, "annotation", None)
if annotation is not None:
middleware_annotation_seen_values.append(f"{annotation}/{num}")
@@ -418,7 +419,7 @@ async def view_handler(request):
with pytest.warns(DeprecationWarning, match="Middleware decorator is deprecated"):
@web.middleware
- async def middleware(request, handler):
+ async def middleware(request, handler: Handler):
resp = await handler(request)
assert 200 == resp.status
resp.set_status(201)
@@ -439,24 +440,21 @@ async def handler(request):
return web.Response(body=b"OK")
class Middleware:
- async def __call__(self, request, handler):
+ async def __call__(self, request, handler: Handler):
resp = await handler(request)
assert 200 == resp.status
resp.set_status(201)
resp.text = resp.text + "[new style middleware]"
return resp
- with pytest.warns(None) as warning_checker:
- app = web.Application()
- app.middlewares.append(Middleware())
- app.router.add_route("GET", "/", handler)
- client = await aiohttp_client(app)
- resp = await client.get("/")
- assert 201 == resp.status
- txt = await resp.text()
- assert "OK[new style middleware]" == txt
-
- assert len(warning_checker) == 0
+ app = web.Application()
+ app.middlewares.append(Middleware())
+ app.router.add_route("GET", "/", handler)
+ client = await aiohttp_client(app)
+ resp = await client.get("/")
+ assert 201 == resp.status
+ txt = await resp.text()
+ assert "OK[new style middleware]" == txt
async def test_new_style_middleware_method(loop: Any, aiohttp_client: Any) -> None:
@@ -464,21 +462,18 @@ async def handler(request):
return web.Response(body=b"OK")
class Middleware:
- async def call(self, request, handler):
+ async def call(self, request, handler: Handler):
resp = await handler(request)
assert 200 == resp.status
resp.set_status(201)
resp.text = resp.text + "[new style middleware]"
return resp
- with pytest.warns(None) as warning_checker:
- app = web.Application()
- app.middlewares.append(Middleware().call)
- app.router.add_route("GET", "/", handler)
- client = await aiohttp_client(app)
- resp = await client.get("/")
- assert 201 == resp.status
- txt = await resp.text()
- assert "OK[new style middleware]" == txt
-
- assert len(warning_checker) == 0
+ app = web.Application()
+ app.middlewares.append(Middleware().call)
+ app.router.add_route("GET", "/", handler)
+ client = await aiohttp_client(app)
+ resp = await client.get("/")
+ assert 201 == resp.status
+ txt = await resp.text()
+ assert "OK[new style middleware]" == txt
diff --git a/tests/test_web_response.py b/tests/test_web_response.py
index d37c8e8cc27..ddda01aae7d 100644
--- a/tests/test_web_response.py
+++ b/tests/test_web_response.py
@@ -3,6 +3,7 @@
import datetime
import gzip
import json
+import re
import weakref
from concurrent.futures import ThreadPoolExecutor
from typing import Any, Optional
@@ -15,6 +16,7 @@
from aiohttp import HttpVersion, HttpVersion10, HttpVersion11, hdrs
from aiohttp.helpers import ETag
+from aiohttp.http_writer import _serialize_headers
from aiohttp.payload import BytesPayload
from aiohttp.test_utils import make_mocked_coro, make_mocked_request
from aiohttp.web import ContentCoding, Response, StreamResponse, json_response
@@ -59,12 +61,7 @@ def write(chunk):
buf.extend(chunk)
async def write_headers(status_line, headers):
- headers = (
- status_line
- + "\r\n"
- + "".join([k + ": " + v + "\r\n" for k, v in headers.items()])
- )
- headers = headers.encode("utf-8") + b"\r\n"
+ headers = _serialize_headers(status_line, headers)
buf.extend(headers)
async def write_eof(chunk=b""):
@@ -1171,3 +1168,34 @@ def test_text_is_json_encoded(self) -> None:
def test_content_type_is_overrideable(self) -> None:
resp = json_response({"foo": 42}, content_type="application/vnd.json+api")
assert "application/vnd.json+api" == resp.content_type
+
+
+@pytest.mark.dev_mode
+async def test_no_warn_small_cookie(buf: Any, writer: Any) -> None:
+ resp = Response()
+ resp.set_cookie("foo", "ÿ" + "8" * 4064, max_age=2600) # No warning
+ req = make_request("GET", "/", writer=writer)
+
+ await resp.prepare(req)
+ await resp.write_eof()
+
+ cookie = re.search(b"Set-Cookie: (.*?)\r\n", buf).group(1)
+ assert len(cookie) == 4096
+
+
+@pytest.mark.dev_mode
+async def test_warn_large_cookie(buf: Any, writer: Any) -> None:
+ resp = Response()
+
+ with pytest.warns(
+ UserWarning,
+ match="The size of is too large, it might get ignored by the client.",
+ ):
+ resp.set_cookie("foo", "ÿ" + "8" * 4065, max_age=2600)
+ req = make_request("GET", "/", writer=writer)
+
+ await resp.prepare(req)
+ await resp.write_eof()
+
+ cookie = re.search(b"Set-Cookie: (.*?)\r\n", buf).group(1)
+ assert len(cookie) == 4097
diff --git a/tests/test_worker.py b/tests/test_worker.py
index 317945f895a..5f973179228 100644
--- a/tests/test_worker.py
+++ b/tests/test_worker.py
@@ -250,7 +250,7 @@ def test__create_ssl_context_without_certs_and_ciphers(
worker,
tls_certificate_pem_path,
) -> None:
- worker.cfg.ssl_version = ssl.PROTOCOL_SSLv23
+ worker.cfg.ssl_version = ssl.PROTOCOL_TLS_CLIENT
worker.cfg.cert_reqs = ssl.CERT_OPTIONAL
worker.cfg.certfile = tls_certificate_pem_path
worker.cfg.keyfile = tls_certificate_pem_path
@@ -264,7 +264,7 @@ def test__create_ssl_context_with_ciphers(
worker,
tls_certificate_pem_path,
) -> None:
- worker.cfg.ssl_version = ssl.PROTOCOL_SSLv23
+ worker.cfg.ssl_version = ssl.PROTOCOL_TLS_CLIENT
worker.cfg.cert_reqs = ssl.CERT_OPTIONAL
worker.cfg.certfile = tls_certificate_pem_path
worker.cfg.keyfile = tls_certificate_pem_path
@@ -279,7 +279,7 @@ def test__create_ssl_context_with_ca_certs(
tls_ca_certificate_pem_path,
tls_certificate_pem_path,
) -> None:
- worker.cfg.ssl_version = ssl.PROTOCOL_SSLv23
+ worker.cfg.ssl_version = ssl.PROTOCOL_TLS_CLIENT
worker.cfg.cert_reqs = ssl.CERT_OPTIONAL
worker.cfg.certfile = tls_certificate_pem_path
worker.cfg.keyfile = tls_certificate_pem_path
diff --git a/tools/bench-asyncio-write.py b/tools/bench-asyncio-write.py
index 6b34782d909..8535219fe55 100644
--- a/tools/bench-asyncio-write.py
+++ b/tools/bench-asyncio-write.py
@@ -3,6 +3,7 @@
import math
import os
import signal
+from typing import List, Tuple
PORT = 8888
@@ -38,7 +39,7 @@ def fm_size(s, _fms=("", "K", "M", "G")):
while s >= 1024:
s /= 1024
i += 1
- return "{:.0f}{}B".format(s, _fms[i])
+ return f"{s:.0f}{_fms[i]}B"
def fm_time(s, _fms=("", "m", "µ", "n")):
@@ -48,7 +49,14 @@ def fm_time(s, _fms=("", "m", "µ", "n")):
while s < 1:
s *= 1000
i += 1
- return "{:.2f}{}s".format(s, _fms[i])
+ return f"{s:.2f}{_fms[i]}s"
+
+
+def _job(j: List[int]) -> Tuple[str, List[bytes]]:
+ # Always start with a 256B headers chunk
+ body = [b"0" * s for s in [256] + list(j)]
+ job_title = f"{fm_size(sum(j))} / {len(j)}"
+ return (job_title, body)
writes = [
@@ -71,14 +79,8 @@ def fm_time(s, _fms=("", "m", "µ", "n")):
[10 * 2 ** 27 for _ in range(5)],
)
-jobs = [
- (
- # always start with a 256B headers chunk
- "{} / {}".format(fm_size(sum(j) if j else 0), len(j)),
- [b"0" * s for s in [256] + list(j)],
- )
- for j in bodies
-]
+
+jobs = [_job(j) for j in bodies]
async def time(loop, fn, *args):
@@ -111,7 +113,7 @@ async def bench(job_title, w, body, base=None):
fm_time(mean),
fm_time(sd),
str(it),
- "{:.2%}".format(mean / base - 1) if base is not None else "",
+ f"{mean / base - 1:.2%}" if base is not None else "",
)
)
return mean
diff --git a/tools/check_changes.py b/tools/check_changes.py
index 77ae2431051..cd488bebc9a 100755
--- a/tools/check_changes.py
+++ b/tools/check_changes.py
@@ -4,6 +4,7 @@
from pathlib import Path
ALLOWED_SUFFIXES = [".feature", ".bugfix", ".doc", ".removal", ".misc"]
+ALLOWED_SUFFIXES += [f"{suffix}.rst" for suffix in ALLOWED_SUFFIXES]
def get_root(script_path):
@@ -22,7 +23,7 @@ def main(argv):
changes = root / "CHANGES"
failed = False
for fname in changes.iterdir():
- if fname.name in (".gitignore", ".TEMPLATE.rst"):
+ if fname.name in (".gitignore", ".TEMPLATE.rst", "README.rst"):
continue
if fname.suffix not in ALLOWED_SUFFIXES:
if not failed:
diff --git a/tools/gen.py b/tools/gen.py
index fa916a8d925..d00780aa676 100755
--- a/tools/gen.py
+++ b/tools/gen.py
@@ -143,7 +143,7 @@ def gen(dct):
out.write(HEADER)
missing = set()
gen_block(dct, "", set(), missing, out)
- missing_labels = "\n".join(m for m in sorted(missing))
+ missing_labels = "\n".join(sorted(missing))
out.write(FOOTER.format(missing=missing_labels))
return out